fix(es/minifier): Fix skipping logic of sequential inliner (#4469)

This commit is contained in:
Donny/강동윤 2022-04-28 20:41:41 +09:00 committed by GitHub
parent a72f436148
commit 233c4d5b86
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 261 additions and 162 deletions

View File

@@ -0,0 +1,13 @@
const test = () => {
let a = 0;
let b = 0;
let c = [1, 2, 3, 4, 5].map((i) => {
a += i;
b += i;
return i;
});
return [a, b, c];
};
const [a, b, c] = test();
console.log("test", a, b, c);

View File

@@ -1,9 +1,9 @@
ab.kind;
var x2 = {
var x1 = {
a: "foo",
b: 42
}, x2 = {
a: "foo",
b: !0
};
({
a: "foo",
b: 42
})[k] = "bar", x2[k] = "bar", s2 = s1 = s2, t2 = t1 = t2;
x1[k] = "bar", x2[k] = "bar", s2 = s1 = s2, t2 = t1 = t2;

View File

@@ -1,9 +1,9 @@
ab.kind, f10(a1), f10(a2);
var x2 = {
var x1 = {
a: "foo",
b: 42
}, x2 = {
a: "foo",
b: !0
};
({
a: "foo",
b: 42
})[k] = "bar", x2[k] = "bar", s2 = s1 = s2, t2 = t1 = t2;
x1[k] = "bar", x2[k] = "bar", s2 = s1 = s2, t2 = t1 = t2;

View File

@@ -0,0 +1,6 @@
var x, y;
x = {
t: 1000
}, y = {
t: 3000
}, x.t, y.t;

View File

@@ -0,0 +1,6 @@
var x, y;
x = {
t: 1000
}, y = {
t: 3000
}, x.t, y.t;

View File

@@ -855,10 +855,6 @@ where
return false;
}
if !e.may_have_side_effects() {
return true;
}
match e {
Expr::Ident(e) => {
if let Some(a) = a {
@@ -984,6 +980,10 @@ where
_ => {}
}
if !e.may_have_side_effects() {
return true;
}
log_abort!("sequences: skip: Unknown expr: {}", dump(e, true));
false

View File

@@ -9789,3 +9789,71 @@ fn try_catch_5() {
run_default_exec_test(src);
}
#[test]
fn issue_4444_1() {
let src = r###"
const test = () => {
let a = 0;
let b = 0;
let c = [1, 2, 3, 4, 5].map((i) => {
a += i;
b += i;
return i;
});
return [a, b, c];
};
const [a, b, c] = test();
console.log("test", a, b, c);
"###;
let config = r###"
{
"arguments": false,
"arrows": false,
"booleans": true,
"booleans_as_integers": false,
"collapse_vars": true,
"comparisons": true,
"computed_props": false,
"conditionals": false,
"dead_code": false,
"directives": false,
"drop_console": false,
"drop_debugger": true,
"evaluate": false,
"expression": false,
"hoist_funs": false,
"hoist_props": false,
"hoist_vars": false,
"if_return": true,
"join_vars": false,
"keep_classnames": false,
"keep_fargs": true,
"keep_fnames": false,
"keep_infinity": false,
"loops": true,
"negate_iife": false,
"properties": true,
"reduce_funcs": false,
"reduce_vars": false,
"side_effects": true,
"switches": false,
"typeofs": false,
"unsafe": false,
"unsafe_arrows": false,
"unsafe_comps": false,
"unsafe_Function": false,
"unsafe_math": false,
"unsafe_symbols": false,
"unsafe_methods": false,
"unsafe_proto": false,
"unsafe_regexp": false,
"unsafe_undefined": false,
"unused": true
}
"###;
run_exec_test(src, config, false);
}

View File

@@ -13501,9 +13501,10 @@
}, R.updateQueue = b, b.lastEffect = a.next = a) : null === (c = b.lastEffect) ? b.lastEffect = a.next = a : (d = c.next, c.next = a, a.next = d, b.lastEffect = a), a;
}
function Sh(a) {
var b = Hh();
return a = {
current: a
}, Hh().memoizedState = a;
}, b.memoizedState = a;
}
function Th() {
return Ih().memoizedState;

View File

@@ -578,11 +578,11 @@
0,
0,
0
], ctx = canvas.getContext('2d');
], result = [], ctx = canvas.getContext('2d');
if (!ctx) throw new Error('Unable to get canvas context');
for(var frame = ctx.getImageData(from.x, from.y, this.size.x, this.size.y), data = frame.data, length = this.data.length; length--;){
hsv[0] = this.data[length] * adjustedScale;
var pos = 4 * length, _result = hsv[0] <= 0 ? whiteRgb : hsv[0] >= 360 ? blackRgb : Object(_cv_utils__WEBPACK_IMPORTED_MODULE_5__.g)(hsv, rgb), _result2 = _babel_runtime_helpers_slicedToArray__WEBPACK_IMPORTED_MODULE_0___default()(_result, 3);
hsv[0] = this.data[length] * adjustedScale, result = hsv[0] <= 0 ? whiteRgb : hsv[0] >= 360 ? blackRgb : Object(_cv_utils__WEBPACK_IMPORTED_MODULE_5__.g)(hsv, rgb);
var pos = 4 * length, _result = result, _result2 = _babel_runtime_helpers_slicedToArray__WEBPACK_IMPORTED_MODULE_0___default()(_result, 3);
data[pos] = _result2[0], data[pos + 1] = _result2[1], data[pos + 2] = _result2[2], data[pos + 3] = 255;
}
ctx.putImageData(frame, from.x, from.y);

View File

@@ -7608,7 +7608,7 @@
}, getWorkerString = function(fn) {
return fn.toString().replace(/^function.+?{/, '').slice(0, -1);
}, workerCode$1 = transform1(getWorkerString(function() {
var _TransportPacketStream, _TransportParseStream, _ElementaryStream, _AdtsStream, _H264Stream, _NalByteStream, PROFILES_WITH_OPTIONAL_SPS_DATA, _AacStream, _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream, getTimescaleFromMediaHeader, Stream = function() {
var _TransportPacketStream, _TransportParseStream, _ElementaryStream, _AdtsStream, _H264Stream, _NalByteStream, PROFILES_WITH_OPTIONAL_SPS_DATA, _AacStream, _VideoSegmentStream, _AudioSegmentStream, _Transmuxer, _CoalesceStream, timescale1, startTime1, compositionStartTime, getVideoTrackIds, getTracks, getTimescaleFromMediaHeader, Stream = function() {
this.init = function() {
var listeners = {};
this.on = function(type, listener) {
@@ -8432,7 +8432,7 @@
}, trun$1 = function(track, offset) {
return 'audio' === track.type ? audioTrun(track, offset) : videoTrun(track, offset);
};
var box, dinf, esds, ftyp, mdat1, mfhd, minf, moof1, moov, mvex, mvhd, trak1, tkhd1, mdia, mdhd1, hdlr1, sdtp, stbl, stsd1, traf1, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS, silence, secondsToVideoTs, secondsToAudioTs, videoTsToSeconds, audioTsToSeconds, mp4Generator = {
var box, dinf, esds, ftyp, mdat1, mfhd, minf, moof1, moov, mvex, mvhd, trak1, tkhd1, mdia, mdhd1, hdlr1, sdtp, stbl, stsd1, traf1, trex, trun$1, types, MAJOR_BRAND, MINOR_VERSION, AVC1_BRAND, VIDEO_HDLR, AUDIO_HDLR, HDLR_TYPES, VMHD, SMHD, DREF, STCO, STSC, STSZ, STTS, silence, secondsToVideoTs, secondsToAudioTs, videoTsToSeconds, audioTsToSeconds, audioTsToVideoTs, videoTsToAudioTs, metadataTsToSeconds, mp4Generator = {
ftyp: ftyp,
mdat: mdat1,
moof: moof1,
@@ -8742,29 +8742,31 @@
}, {}));
}
return silence;
}, clock = {
};
secondsToVideoTs = function(seconds) {
return 90000 * seconds;
}, secondsToAudioTs = function(seconds, sampleRate) {
return seconds * sampleRate;
}, videoTsToSeconds = function(timestamp) {
return timestamp / 90000;
}, audioTsToSeconds = function(timestamp, sampleRate) {
return timestamp / sampleRate;
}, audioTsToVideoTs = function(timestamp, sampleRate) {
return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
}, videoTsToAudioTs = function(timestamp, sampleRate) {
return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
}, metadataTsToSeconds = function(timestamp, timelineStartPts, keepOriginalTimestamps) {
return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
};
var clock = {
ONE_SECOND_IN_TS: 90000,
secondsToVideoTs: secondsToVideoTs = function(seconds) {
return 90000 * seconds;
},
secondsToAudioTs: secondsToAudioTs = function(seconds, sampleRate) {
return seconds * sampleRate;
},
videoTsToSeconds: videoTsToSeconds = function(timestamp) {
return timestamp / 90000;
},
audioTsToSeconds: audioTsToSeconds = function(timestamp, sampleRate) {
return timestamp / sampleRate;
},
audioTsToVideoTs: function(timestamp, sampleRate) {
return secondsToVideoTs(audioTsToSeconds(timestamp, sampleRate));
},
videoTsToAudioTs: function(timestamp, sampleRate) {
return secondsToAudioTs(videoTsToSeconds(timestamp), sampleRate);
},
metadataTsToSeconds: function(timestamp, timelineStartPts, keepOriginalTimestamps) {
return videoTsToSeconds(keepOriginalTimestamps ? timestamp : timestamp - timelineStartPts);
}
secondsToVideoTs: secondsToVideoTs,
secondsToAudioTs: secondsToAudioTs,
videoTsToSeconds: videoTsToSeconds,
audioTsToSeconds: audioTsToSeconds,
audioTsToVideoTs: audioTsToVideoTs,
videoTsToAudioTs: videoTsToAudioTs,
metadataTsToSeconds: metadataTsToSeconds
}, sumFrameByteLengths = function(array) {
var i, sum = 0;
for(i = 0; i < array.length; i++)sum += array[i].data.byteLength;
@@ -10451,123 +10453,125 @@
logs: []
}, this.resetCaptionStream();
}, this.reset();
}, toUnsigned = bin.toUnsigned, toHexString = bin.toHexString, probe$2 = {
}, toUnsigned = bin.toUnsigned, toHexString = bin.toHexString;
timescale1 = function(init) {
return findBox_1(init, [
'moov',
'trak'
]).reduce(function(result, trak) {
var tkhd, index, id, mdhd;
return (tkhd = findBox_1(trak, [
'tkhd'
])[0]) && (index = 0 === tkhd[0] ? 12 : 20, id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]), mdhd = findBox_1(trak, [
'mdia',
'mdhd'
])[0]) ? (index = 0 === mdhd[0] ? 12 : 20, result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]), result) : null;
}, {});
}, startTime1 = function(timescale, fragment) {
var trafs, baseTimes, result2;
return trafs = findBox_1(fragment, [
'moof',
'traf'
]), baseTimes = [].concat.apply([], trafs.map(function(traf) {
return findBox_1(traf, [
'tfhd'
]).map(function(tfhd) {
var scale, baseTime;
return scale = timescale[toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7])] || 90e3, (baseTime = 'number' != typeof (baseTime = findBox_1(traf, [
'tfdt'
]).map(function(tfdt) {
var version, result;
return version = tfdt[0], result = toUnsigned(tfdt[4] << 24 | tfdt[5] << 16 | tfdt[6] << 8 | tfdt[7]), 1 === version && (result *= 4294967296, result += toUnsigned(tfdt[8] << 24 | tfdt[9] << 16 | tfdt[10] << 8 | tfdt[11])), result;
})[0]) || isNaN(baseTime) ? 1 / 0 : baseTime) / scale;
});
})), isFinite(result2 = Math.min.apply(null, baseTimes)) ? result2 : 0;
}, compositionStartTime = function(timescales, fragment) {
var trackId, trafBoxes = findBox_1(fragment, [
'moof',
'traf'
]), baseMediaDecodeTime = 0, compositionTimeOffset = 0;
if (trafBoxes && trafBoxes.length) {
var tfhd = findBox_1(trafBoxes[0], [
'tfhd'
])[0], trun = findBox_1(trafBoxes[0], [
'trun'
])[0], tfdt = findBox_1(trafBoxes[0], [
'tfdt'
])[0];
if (tfhd && (trackId = parseTfhd(tfhd).trackId), tfdt && (baseMediaDecodeTime = parseTfdt(tfdt).baseMediaDecodeTime), trun) {
var parsedTrun = parseTrun(trun);
parsedTrun.samples && parsedTrun.samples.length && (compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0);
}
}
return (baseMediaDecodeTime + compositionTimeOffset) / (timescales[trackId] || 90e3);
}, getVideoTrackIds = function(init) {
var traks = findBox_1(init, [
'moov',
'trak'
]), videoTrackIds = [];
return traks.forEach(function(trak) {
var hdlrs = findBox_1(trak, [
'mdia',
'hdlr'
]), tkhds = findBox_1(trak, [
'tkhd'
]);
hdlrs.forEach(function(hdlr, index) {
var view, trackId, handlerType = parseType_1(hdlr.subarray(8, 12)), tkhd = tkhds[index];
'vide' === handlerType && (trackId = 0 === (view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength)).getUint8(0) ? view.getUint32(12) : view.getUint32(20), videoTrackIds.push(trackId));
});
}), videoTrackIds;
}, getTimescaleFromMediaHeader = function(mdhd) {
var index = 0 === mdhd[0] ? 12 : 20;
return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
}, getTracks = function(init) {
var traks = findBox_1(init, [
'moov',
'trak'
]), tracks = [];
return traks.forEach(function(trak) {
var track = {}, tkhd = findBox_1(trak, [
'tkhd'
])[0];
tkhd && (tkhdVersion = (view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength)).getUint8(0), track.id = 0 === tkhdVersion ? view.getUint32(12) : view.getUint32(20));
var hdlr = findBox_1(trak, [
'mdia',
'hdlr'
])[0];
if (hdlr) {
var type = parseType_1(hdlr.subarray(8, 12));
'vide' === type ? track.type = 'video' : 'soun' === type ? track.type = 'audio' : track.type = type;
}
var stsd = findBox_1(trak, [
'mdia',
'minf',
'stbl',
'stsd'
])[0];
if (stsd) {
var view, tkhdVersion, codecConfig, sampleDescriptions = stsd.subarray(8);
track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
var codecBox = findBox_1(sampleDescriptions, [
track.codec
])[0];
codecBox && (/^[asm]vc[1-9]$/i.test(track.codec) ? 'avcC' === parseType_1((codecConfig = codecBox.subarray(78)).subarray(4, 8)) && codecConfig.length > 11 ? (track.codec += '.', track.codec += toHexString(codecConfig[9]), track.codec += toHexString(codecConfig[10]), track.codec += toHexString(codecConfig[11])) : track.codec = 'avc1.4d400d' : /^mp4[a,v]$/i.test(track.codec) ? 'esds' === parseType_1((codecConfig = codecBox.subarray(28)).subarray(4, 8)) && codecConfig.length > 20 && 0 !== codecConfig[19] ? (track.codec += '.' + toHexString(codecConfig[19]), track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '')) : track.codec = 'mp4a.40.2' : track.codec = track.codec.toLowerCase());
}
var mdhd = findBox_1(trak, [
'mdia',
'mdhd'
])[0];
mdhd && (track.timescale = getTimescaleFromMediaHeader(mdhd)), tracks.push(track);
}), tracks;
};
var probe$2 = {
findBox: findBox_1,
parseType: parseType_1,
timescale: function(init) {
return findBox_1(init, [
'moov',
'trak'
]).reduce(function(result, trak) {
var tkhd, index, id, mdhd;
return (tkhd = findBox_1(trak, [
'tkhd'
])[0]) && (index = 0 === tkhd[0] ? 12 : 20, id = toUnsigned(tkhd[index] << 24 | tkhd[index + 1] << 16 | tkhd[index + 2] << 8 | tkhd[index + 3]), mdhd = findBox_1(trak, [
'mdia',
'mdhd'
])[0]) ? (index = 0 === mdhd[0] ? 12 : 20, result[id] = toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]), result) : null;
}, {});
},
startTime: function(timescale, fragment) {
var trafs, baseTimes, result2;
return trafs = findBox_1(fragment, [
'moof',
'traf'
]), baseTimes = [].concat.apply([], trafs.map(function(traf) {
return findBox_1(traf, [
'tfhd'
]).map(function(tfhd) {
var scale, baseTime;
return scale = timescale[toUnsigned(tfhd[4] << 24 | tfhd[5] << 16 | tfhd[6] << 8 | tfhd[7])] || 90e3, (baseTime = 'number' != typeof (baseTime = findBox_1(traf, [
'tfdt'
]).map(function(tfdt) {
var version, result;
return version = tfdt[0], result = toUnsigned(tfdt[4] << 24 | tfdt[5] << 16 | tfdt[6] << 8 | tfdt[7]), 1 === version && (result *= 4294967296, result += toUnsigned(tfdt[8] << 24 | tfdt[9] << 16 | tfdt[10] << 8 | tfdt[11])), result;
})[0]) || isNaN(baseTime) ? 1 / 0 : baseTime) / scale;
});
})), isFinite(result2 = Math.min.apply(null, baseTimes)) ? result2 : 0;
},
compositionStartTime: function(timescales, fragment) {
var trackId, trafBoxes = findBox_1(fragment, [
'moof',
'traf'
]), baseMediaDecodeTime = 0, compositionTimeOffset = 0;
if (trafBoxes && trafBoxes.length) {
var tfhd = findBox_1(trafBoxes[0], [
'tfhd'
])[0], trun = findBox_1(trafBoxes[0], [
'trun'
])[0], tfdt = findBox_1(trafBoxes[0], [
'tfdt'
])[0];
if (tfhd && (trackId = parseTfhd(tfhd).trackId), tfdt && (baseMediaDecodeTime = parseTfdt(tfdt).baseMediaDecodeTime), trun) {
var parsedTrun = parseTrun(trun);
parsedTrun.samples && parsedTrun.samples.length && (compositionTimeOffset = parsedTrun.samples[0].compositionTimeOffset || 0);
}
}
return (baseMediaDecodeTime + compositionTimeOffset) / (timescales[trackId] || 90e3);
},
videoTrackIds: function(init) {
var traks = findBox_1(init, [
'moov',
'trak'
]), videoTrackIds = [];
return traks.forEach(function(trak) {
var hdlrs = findBox_1(trak, [
'mdia',
'hdlr'
]), tkhds = findBox_1(trak, [
'tkhd'
]);
hdlrs.forEach(function(hdlr, index) {
var view, trackId, handlerType = parseType_1(hdlr.subarray(8, 12)), tkhd = tkhds[index];
'vide' === handlerType && (trackId = 0 === (view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength)).getUint8(0) ? view.getUint32(12) : view.getUint32(20), videoTrackIds.push(trackId));
});
}), videoTrackIds;
},
tracks: function(init) {
var traks = findBox_1(init, [
'moov',
'trak'
]), tracks = [];
return traks.forEach(function(trak) {
var track = {}, tkhd = findBox_1(trak, [
'tkhd'
])[0];
tkhd && (tkhdVersion = (view = new DataView(tkhd.buffer, tkhd.byteOffset, tkhd.byteLength)).getUint8(0), track.id = 0 === tkhdVersion ? view.getUint32(12) : view.getUint32(20));
var hdlr = findBox_1(trak, [
'mdia',
'hdlr'
])[0];
if (hdlr) {
var type = parseType_1(hdlr.subarray(8, 12));
'vide' === type ? track.type = 'video' : 'soun' === type ? track.type = 'audio' : track.type = type;
}
var stsd = findBox_1(trak, [
'mdia',
'minf',
'stbl',
'stsd'
])[0];
if (stsd) {
var view, tkhdVersion, codecConfig, sampleDescriptions = stsd.subarray(8);
track.codec = parseType_1(sampleDescriptions.subarray(4, 8));
var codecBox = findBox_1(sampleDescriptions, [
track.codec
])[0];
codecBox && (/^[asm]vc[1-9]$/i.test(track.codec) ? 'avcC' === parseType_1((codecConfig = codecBox.subarray(78)).subarray(4, 8)) && codecConfig.length > 11 ? (track.codec += '.', track.codec += toHexString(codecConfig[9]), track.codec += toHexString(codecConfig[10]), track.codec += toHexString(codecConfig[11])) : track.codec = 'avc1.4d400d' : /^mp4[a,v]$/i.test(track.codec) ? 'esds' === parseType_1((codecConfig = codecBox.subarray(28)).subarray(4, 8)) && codecConfig.length > 20 && 0 !== codecConfig[19] ? (track.codec += '.' + toHexString(codecConfig[19]), track.codec += '.' + toHexString(codecConfig[20] >>> 2 & 0x3f).replace(/^0/, '')) : track.codec = 'mp4a.40.2' : track.codec = track.codec.toLowerCase());
}
var mdhd = findBox_1(trak, [
'mdia',
'mdhd'
])[0];
mdhd && (track.timescale = getTimescaleFromMediaHeader(mdhd)), tracks.push(track);
}), tracks;
},
getTimescaleFromMediaHeader: getTimescaleFromMediaHeader = function(mdhd) {
var index = 0 === mdhd[0] ? 12 : 20;
return toUnsigned(mdhd[index] << 24 | mdhd[index + 1] << 16 | mdhd[index + 2] << 8 | mdhd[index + 3]);
}
timescale: timescale1,
startTime: startTime1,
compositionStartTime: compositionStartTime,
videoTrackIds: getVideoTrackIds,
tracks: getTracks,
getTimescaleFromMediaHeader: getTimescaleFromMediaHeader
}, parsePid = function(packet) {
var pid = 0x1f & packet[1];
return pid <<= 8, pid |= packet[2];

View File

@@ -2561,14 +2561,14 @@
function setSelectionNoUndo(doc, sel, options) {
if (hasHandler(doc, "beforeSelectionChange") || doc.cm && hasHandler(doc.cm, "beforeSelectionChange")) {
var doc6, sel3, options2, obj;
sel = (doc6 = doc, sel3 = sel, options2 = options, (signal(doc6, "beforeSelectionChange", doc6, obj = {
sel = (doc6 = doc, sel3 = sel, options2 = options, obj = {
ranges: sel3.ranges,
update: function(ranges) {
this.ranges = [];
for(var i = 0; i < ranges.length; i++)this.ranges[i] = new Range(clipPos(doc6, ranges[i].anchor), clipPos(doc6, ranges[i].head));
},
origin: options2 && options2.origin
}), doc6.cm && signal(doc6.cm, "beforeSelectionChange", doc6.cm, obj), obj.ranges != sel3.ranges) ? normalizeSelection(doc6.cm, obj.ranges, obj.ranges.length - 1) : sel3);
}, (signal(doc6, "beforeSelectionChange", doc6, obj), doc6.cm && signal(doc6.cm, "beforeSelectionChange", doc6.cm, obj), obj.ranges != sel3.ranges) ? normalizeSelection(doc6.cm, obj.ranges, obj.ranges.length - 1) : sel3);
}
var bias = options && options.bias || (0 > cmp(sel.primary().head, doc.sel.primary().head) ? -1 : 1);
setSelectionInner(doc, skipAtomicInSelection(doc, sel, bias, !0)), !(options && !1 === options.scroll) && doc.cm && "nocursor" != doc.cm.getOption("readOnly") && ensureCursorVisible(doc.cm);

View File

@@ -3060,9 +3060,10 @@
];
},
useRef: function(a) {
var b = $h();
return a = {
current: a
}, $h().memoizedState = a;
}, b.memoizedState = a;
},
useState: mi,
useDebugValue: wi,

View File

@@ -6503,10 +6503,10 @@
return workInProgress.memoizedState = null, _primaryChildFragment6;
}
function mountSuspensePrimaryChildren(workInProgress, primaryChildren, renderLanes) {
var primaryChildFragment = createFiberFromOffscreen({
var mode = workInProgress.mode, primaryChildFragment = createFiberFromOffscreen({
mode: 'visible',
children: primaryChildren
}, workInProgress.mode, renderLanes, null);
}, mode, renderLanes, null);
return primaryChildFragment.return = workInProgress, workInProgress.child = primaryChildFragment, primaryChildFragment;
}
function mountSuspenseFallbackChildren(workInProgress, primaryChildren, fallbackChildren, renderLanes) {