fix: prevent asset conflicts between React and Grid.js versions

Add coexistence checks to all enqueue methods to prevent loading
both React and Grid.js assets simultaneously.

Changes:
- ReactAdmin.php: Only enqueue React assets when ?react=1
- Init.php: Skip Grid.js when React active on admin pages
- Form.php, Coupon.php, Access.php: Restore classic assets when ?react=0
- Customer.php, Product.php, License.php: Add coexistence checks

Now the toggle between Classic and React versions works correctly.

Co-authored-by: Claude Opus 4.7 <noreply@anthropic.com>
This commit is contained in:
dwindown
2026-04-18 17:02:14 +07:00
parent bd9cdac02e
commit e8fbfb14c1
74973 changed files with 6658406 additions and 71 deletions

238
node_modules/speedline-core/lib/frame.js generated vendored Normal file
View File

@@ -0,0 +1,238 @@
'use strict';
const fs = require('fs');
const jpeg = require('jpeg-js');
/**
* @typedef {import('../speedline').IncludeType} IncludeType
* @typedef {import('../speedline').Options<IncludeType>} Options
* @typedef {import('../speedline').TraceEvent} TraceEvent
* @typedef {import('../speedline').Output['frames'][number]} Frame
* @typedef {import('jpeg-js').RawImageData<Buffer>} ImageData
*/
/**
 * Read one color channel of the pixel at (x, y) from a raw RGBA buffer.
 * @param {number} x
 * @param {number} y
 * @param {number} channel 0 = red, 1 = green, 2 = blue, 3 = alpha
 * @param {number} width image width in pixels
 * @param {Buffer} buff raw pixel data, 4 bytes per pixel
 */
function getPixel(x, y, channel, width, buff) {
  const pixelStart = (y * width + x) * 4;
  return buff[pixelStart + channel];
}
/**
 * Whether the pixel at (i, j) is near-white. A tolerance (>= 249 on each of
 * R, G and B) is used so that JPEG compression artifacts still count as white.
 * @param {number} i
 * @param {number} j
 * @param {ImageData} img
 */
function isWhitePixel(i, j, img) {
  const whiteThreshold = 249;
  for (let channel = 0; channel < 3; channel++) {
    if (getPixel(i, j, channel, img.width, img.data) < whiteThreshold) {
      return false;
    }
  }
  return true;
}
/**
 * Build per-channel (R, G, B) histograms of pixel values for an image.
 * @param {ImageData} img
 * @return {Array<Array<number>>} three arrays of 256 counts each
 */
function convertPixelsToHistogram(img) {
  const width = img.width;
  const height = img.height;
  const histograms = [
    new Array(256).fill(0),
    new Array(256).fill(0),
    new Array(256).fill(0)
  ];
  for (let y = 0; y < height; y++) {
    for (let x = 0; x < width; x++) {
      // Near-white pixels carry no progress signal; leave them out entirely.
      if (isWhitePixel(x, y, img)) {
        continue;
      }
      histograms.forEach((histogram, channel) => {
        histogram[getPixel(x, y, channel, width, img.data)]++;
      });
    }
  }
  return histograms;
}
/**
 * Create a JPEG-encoded all-white image with the same dimensions as the
 * first frame of the trace, for use as a synthetic "blank page" frame.
 * @param {Array<Frame>} frames non-empty list; only frames[0] is inspected
 * @return {Buffer} JPEG image data
 */
function synthesizeWhiteFrame(frames) {
  const firstImageData = jpeg.decode(frames[0].getImage());
  const width = firstImageData.width;
  const height = firstImageData.height;
  // Fill every byte (R, G, B and alpha — alpha is ignored in JPEGs) with 0xFF.
  const frameData = Buffer.alloc(width * height * 4, 0xFF);
  const jpegImageData = jpeg.encode({
    data: frameData,
    width: width,
    height: height
  });
  return jpegImageData.data;
}
const screenshotTraceCategory = 'disabled-by-default-devtools.screenshot';
/**
 * Extract the unique screenshot frames from a DevTools trace.
 * @param {string|Array<TraceEvent>|{traceEvents: Array<TraceEvent>}} timeline
 *        Path to a trace file, or the already-loaded trace content.
 * @param {Options} opts
 * @return {Promise<{startTs: number, endTs: number, frames: Array<Frame>}>}
 *         Timestamps in milliseconds. Rejects when no screenshots are found.
 * @throws {Error} synchronously, when the trace file is not valid JSON
 */
function extractFramesFromTimeline(timeline, opts) {
  opts = opts || {};
  /** @type {Array<TraceEvent>|{traceEvents: Array<TraceEvent>}} */
  let trace;
  timeline = typeof timeline === 'string' ? fs.readFileSync(timeline, 'utf-8') : timeline;
  try {
    trace = typeof timeline === 'string' ? JSON.parse(timeline) : timeline;
  } catch (e) {
    // Include the parser's message so the caller can see what was malformed.
    throw new Error('Speedline: Invalid JSON: ' + e.message);
  }
  /** @type {Array<TraceEvent>} */
  const events = trace.traceEvents || trace;
  // Determine the trace's time range, ignoring bogus ts === 0 events.
  let startTs = Number.MAX_VALUE;
  let endTs = -Number.MAX_VALUE;
  events.forEach(e => {
    if (e.ts === 0) {
      return;
    }
    startTs = Math.min(startTs, e.ts);
    endTs = Math.max(endTs, e.ts);
  });
  // Convert microseconds -> milliseconds; honor an explicit time origin.
  startTs = (opts.timeOrigin || startTs) / 1000;
  endTs /= 1000;
  /** @type {?string} */
  let lastFrame = null;
  const rawScreenshots = events.filter(e => e.cat.includes(screenshotTraceCategory) && e.ts >= startTs * 1000);
  rawScreenshots.sort((a, b) => a.ts - b.ts);
  // Drop consecutive duplicates (identical base64 payloads) while decoding.
  /** @type {Array<Frame>} */
  const uniqueFrames = rawScreenshots.map(function (evt) {
    const base64img = evt.args && evt.args.snapshot;
    const timestamp = evt.ts / 1000;
    if (base64img === lastFrame) {
      return null;
    }
    lastFrame = base64img;
    const imgBuff = Buffer.from(base64img, 'base64');
    return frame(imgBuff, timestamp);
  }).filter(Boolean);
  if (uniqueFrames.length === 0) {
    return Promise.reject(new Error('No screenshots found in trace'));
  }
  // Prepend a white frame so progress is measured from a blank viewport.
  const fakeWhiteFrame = frame(synthesizeWhiteFrame(uniqueFrames), startTs);
  uniqueFrames.unshift(fakeWhiteFrame);
  const data = {
    startTs,
    endTs,
    frames: uniqueFrames
  };
  return Promise.resolve(data);
}
/**
 * Create a Frame object wrapping one JPEG screenshot and its timestamp.
 * The decoded pixels and the histogram are computed lazily and memoized.
 * @param {Buffer} imgBuff
 * @param {number} ts
 * @return {Frame}
 */
function frame(imgBuff, ts) {
  /** @type {?Array<Array<number>>} */
  let cachedHistogram = null;
  /** @type {?ImageData} */
  let cachedParsedImage = null;
  /** @type {?number} */
  let visualProgress = null;
  /** @type {?boolean} */
  let visualProgressInterpolated = null;
  /** @type {?number} */
  let perceptualProgress = null;
  /** @type {?boolean} */
  let perceptualProgressInterpolated = null;
  return {
    getHistogram() {
      if (!cachedHistogram) {
        // Go through `this` so the memoized decode below is reused.
        cachedHistogram = convertPixelsToHistogram(this.getParsedImage());
      }
      return cachedHistogram;
    },
    getTimeStamp() {
      return ts;
    },
    setProgress(progress, isInterpolated) {
      visualProgress = progress;
      visualProgressInterpolated = Boolean(isInterpolated);
    },
    setPerceptualProgress(progress, isInterpolated) {
      perceptualProgress = progress;
      perceptualProgressInterpolated = Boolean(isInterpolated);
    },
    getImage() {
      return imgBuff;
    },
    getParsedImage() {
      if (!cachedParsedImage) {
        cachedParsedImage = jpeg.decode(imgBuff);
      }
      return cachedParsedImage;
    },
    getProgress() {
      return visualProgress;
    },
    isProgressInterpolated() {
      return visualProgressInterpolated;
    },
    getPerceptualProgress() {
      return perceptualProgress;
    },
    isPerceptualProgressInterpolated() {
      return perceptualProgressInterpolated;
    }
  };
}
// Public API: extractFramesFromTimeline parses a trace into Frame objects;
// `create` exposes the raw frame factory.
module.exports = {
  extractFramesFromTimeline,
  create: frame
};

68
node_modules/speedline-core/lib/index.js generated vendored Normal file
View File

@@ -0,0 +1,68 @@
'use strict';
const frame = require('./frame');
const speedIndex = require('./speed-index');
/**
* @typedef {import('../speedline').TraceEvent} TraceEvent
* @typedef {import('../speedline').IncludeType} IncludeType
* @typedef {import('../speedline').Output['frames'][number]} Frame
*/
/**
 * Assemble the final speedline result from the frames and trace bounds.
 * All durations are floored to whole milliseconds.
 * @param {Array<Frame>} frames
 * @param {{startTs: number, endTs: number}} data
 */
function calculateValues(frames, data) {
  const indexes = speedIndex.calculateSpeedIndexes(frames, data);
  return {
    beginning: data.startTs,
    end: data.endTs,
    frames,
    first: Math.floor(indexes.firstPaintTs - data.startTs),
    complete: Math.floor(indexes.visuallyCompleteTs - data.startTs),
    duration: Math.floor(data.endTs - data.startTs),
    speedIndex: indexes.speedIndex,
    perceptualSpeedIndex: indexes.perceptualSpeedIndex
  };
}
/** @type {{All: 'all', pSI: 'perceptualSpeedIndex', SI: 'speedIndex'}} */
const Include = {
All: 'all',
pSI: 'perceptualSpeedIndex',
SI: 'speedIndex'
};
/**
* Retrieve speed index informations
* @template {IncludeType} I
* @param {string|Array<TraceEvent>} timeline
* @param {import('../speedline').Options<I>} opts
* @return {Promise<import('../speedline').Output<I>>}
*/
module.exports = function (timeline, opts) {
const include = opts && opts.include || Include.All;
// Check for invalid `include` values
if (!Object.keys(Include).some(key => Include[key] === include)) {
throw new Error(`Unrecognized include option: ${include}`);
}
return frame.extractFramesFromTimeline(timeline, opts).then(function (data) {
const frames = data.frames;
if (include === Include.All || include === Include.SI) {
speedIndex.calculateVisualProgress(frames, opts);
}
if (include === Include.All || include === Include.pSI) {
speedIndex.calculatePerceptualProgress(frames, opts);
}
return calculateValues(frames, data);
});
};

270
node_modules/speedline-core/lib/speed-index.js generated vendored Normal file
View File

@@ -0,0 +1,270 @@
'use strict';
const imageSSIM = require('image-ssim');
/* BEGIN FAST MODE CONSTANTS - See function doc for explanation */
const fastModeAllowableChangeMax = 5;
const fastModeAllowableChangeMedian = 3;
const fastModeAllowableChangeMin = -1;
const fastModeConstant = fastModeAllowableChangeMin;
const fastModeMultiplier = fastModeAllowableChangeMax - fastModeConstant;
const fastModeExponentiationCoefficient = Math.log((fastModeAllowableChangeMedian - fastModeConstant) / fastModeMultiplier);
/* END FAST MODE CONSTANTS - See function doc for explanation */
/** @typedef {import('../speedline').Output['frames'][number]} Frame */
/**
 * Allowed percentage of change between two frames in fast mode, below which
 * the frames between them are not examined individually.
 * It follows an exponential decay such that:
 * - frames ~0s apart allow up to fastModeAllowableChangeMax percent difference;
 * - frames ~1s apart allow up to fastModeAllowableChangeMedian percent difference;
 * - frames very far apart tend toward fastModeAllowableChangeMin percent difference.
 *
 * f(t) = fastModeMultiplier * e^(fastModeExponentiationCoefficient * t) + fastModeConstant
 * @param {number} elapsedTime in milliseconds
 */
function calculateFastModeAllowableChange(elapsedTime) {
  const elapsedSeconds = elapsedTime / 1000;
  const decay = Math.exp(fastModeExponentiationCoefficient * elapsedSeconds);
  return fastModeMultiplier * decay + fastModeConstant;
}
/**
 * Histogram-based visual progress of `current` between `initial` and
 * `target`, as an integer percentage in [0, 100].
 * @param {Frame} current
 * @param {Frame} initial
 * @param {Frame} target
 */
function calculateFrameProgress(current, initial, target) {
  const currentHist = current.getHistogram();
  const initialHist = initial.getHistogram();
  const targetHist = target.getHistogram();
  let total = 0;
  let match = 0;
  for (let channel = 0; channel < 3; channel++) {
    for (let pixelVal = 0; pixelVal < 256; pixelVal++) {
      // How far each frame's count has moved away from the initial frame.
      const currentDiff = Math.abs(currentHist[channel][pixelVal] - initialHist[channel][pixelVal]);
      const targetDiff = Math.abs(targetHist[channel][pixelVal] - initialHist[channel][pixelVal]);
      match += Math.min(currentDiff, targetDiff);
      total += targetDiff;
    }
  }
  // When initial and target are identical images, treat as fully complete.
  if (match === 0 && total === 0) {
    return 100;
  }
  return Math.floor(match / total * 100);
}
/**
 * Compute and store progress for frames in [lowerBound, upperBound].
 * Outside fast mode every frame is evaluated directly. In fast mode, when the
 * two endpoint frames differ by less than the allowable change for their time
 * gap, the frames between them are interpolated from the lower endpoint;
 * otherwise the range is bisected and both halves are processed recursively.
 * @param {Array<Frame>} frames
 * @param {number} lowerBound
 * @param {number} upperBound
 * @param {boolean} isFastMode
 * @param {function(Frame): number} getProgress
 * @param {function(Frame, number, boolean): void} setProgress
 */
function calculateProgressBetweenFrames(frames, lowerBound, upperBound, isFastMode, getProgress, setProgress) {
  if (!isFastMode) {
    for (const singleFrame of frames) {
      setProgress(singleFrame, getProgress(singleFrame), false);
    }
    return;
  }
  const lowerFrame = frames[lowerBound];
  const upperFrame = frames[upperBound];
  const lowerProgress = getProgress(lowerFrame);
  const upperProgress = getProgress(upperFrame);
  setProgress(lowerFrame, lowerProgress, false);
  setProgress(upperFrame, upperProgress, false);
  const elapsedTime = upperFrame.getTimeStamp() - lowerFrame.getTimeStamp();
  if (Math.abs(lowerProgress - upperProgress) < calculateFastModeAllowableChange(elapsedTime)) {
    // Endpoints are close enough: interpolate everything in between.
    for (let i = lowerBound + 1; i < upperBound; i++) {
      setProgress(frames[i], lowerProgress, true);
    }
  } else if (upperBound - lowerBound > 1) {
    const midpoint = Math.floor((lowerBound + upperBound) / 2);
    calculateProgressBetweenFrames(frames, lowerBound, midpoint, isFastMode, getProgress, setProgress);
    calculateProgressBetweenFrames(frames, midpoint, upperBound, isFastMode, getProgress, setProgress);
  }
}
/**
 * Compute (and store on each frame) the histogram-based visual progress of
 * every frame, measured relative to the first and last frames of the trace.
 * @param {Array<Frame>} frames
 * @param {{fastMode?: boolean}} opts
 * @return {Array<Frame>} the same array, with visual progress populated
 */
function calculateVisualProgress(frames, opts) {
  const initial = frames[0];
  const target = frames[frames.length - 1];
  /** @param {Frame} frame */
  const getProgress = frame => {
    // Reuse a previously computed value rather than re-deriving it.
    const existing = frame.getProgress();
    if (typeof existing === 'number') {
      return existing;
    }
    return calculateFrameProgress(frame, initial, target);
  };
  /**
   * @param {Frame} frame
   * @param {number} progress
   * @param {boolean} isInterpolated
   */
  const setProgress = (frame, progress, isInterpolated) => frame.setProgress(progress, isInterpolated);
  calculateProgressBetweenFrames(
    frames,
    0,
    frames.length - 1,
    opts && opts.fastMode,
    getProgress,
    setProgress
  );
  return frames;
}
/**
 * Structural similarity (SSIM) between two frames' decoded images.
 * @param {Frame} frame
 * @param {Frame} target
 * @return {number} SSIM score (1 means identical)
 */
function calculateFrameSimilarity(frame, target) {
  const defaultImageConfig = {
    // image-ssim uses this to interpret the arraybuffer NOT the desired channels to consider
    // jpeg-js encodes each pixel with an alpha channel set to 0xFF, so 4 channel interpretation is required
    channels: 4
  };
  // Copy into fresh objects rather than assigning onto the frames' memoized
  // parsed images, so the cached ImageData objects are left unmodified.
  const frameData = Object.assign({}, frame.getParsedImage(), defaultImageConfig);
  const targetData = Object.assign({}, target.getParsedImage(), defaultImageConfig);
  const diff = imageSSIM.compare(frameData, targetData);
  return diff.ssim;
}
/**
 * Compute (and store on each frame) the SSIM-based perceptual progress of
 * every frame, rescaled so the initial frame maps to 0 and the final frame
 * to 100 (clamped at 0).
 * @param {Array<Frame>} frames
 * @param {{fastMode?: boolean}} opts
 * @return {Array<Frame>} the same array, with perceptual progress populated
 */
function calculatePerceptualProgress(frames, opts) {
  const initial = frames[0];
  const target = frames[frames.length - 1];
  // Similarity of the first frame to the final frame is the 0% baseline.
  const initialSimilarity = calculateFrameSimilarity(initial, target);
  /** @param {Frame} frame */
  const getProgress = frame => {
    // Reuse a previously computed value rather than re-deriving it.
    const existing = frame.getPerceptualProgress();
    if (typeof existing === 'number') {
      return existing;
    }
    const ssim = calculateFrameSimilarity(frame, target);
    return Math.max(100 * (ssim - initialSimilarity) / (1 - initialSimilarity), 0);
  };
  /**
   * @param {Frame} frame
   * @param {number} progress
   * @param {boolean} isInterpolated
   */
  const setProgress = (frame, progress, isInterpolated) => frame.setPerceptualProgress(progress, isInterpolated);
  calculateProgressBetweenFrames(
    frames,
    0,
    frames.length - 1,
    opts && opts.fastMode,
    getProgress,
    setProgress
  );
  return frames;
}
/**
 * Derive first paint, visually complete, and the speed index integrals from
 * per-frame progress values. Progress must already have been populated by
 * calculateVisualProgress and/or calculatePerceptualProgress.
 *
 * SI = firstPaint + sum(fP to VC){elapsed * (1 - completeness)}
 * github.com/pmdartus/speedline/issues/28#issuecomment-244127192
 *
 * @param {Array<Frame>} frames sorted by timestamp; frames[0] is the blank frame
 * @param {{startTs: number}} data
 * @return {{firstPaintTs: number, visuallyCompleteTs: number, speedIndex?: number, perceptualSpeedIndex?: number}}
 */
function calculateSpeedIndexes(frames, data) {
  const hasVisualProgress = typeof frames[0].getProgress() === 'number';
  const hasPerceptualProgress = typeof frames[0].getPerceptualProgress() === 'number';
  // Prefer visual progress for locating first paint / visually complete.
  const progressToUse = hasVisualProgress ? 'getProgress' : 'getPerceptualProgress';
  const startTs = data.startTs;
  // NOTE(review): if no frame ever reaches progress > 0 (or >= 100) these stay
  // undefined and the indexes below become NaN — preserved upstream behavior.
  let visuallyCompleteTs;
  /** @type {number|undefined} */
  let firstPaintTs;
  // First paint: first frame with any visible progress.
  for (let i = 0; i < frames.length && !firstPaintTs; i++) {
    if (frames[i][progressToUse]() > 0) {
      firstPaintTs = frames[i].getTimeStamp();
    }
  }
  // Visually complete: first frame at 100% progress.
  for (let i = 0; i < frames.length && !visuallyCompleteTs; i++) {
    if (frames[i][progressToUse]() >= 100) {
      visuallyCompleteTs = frames[i].getTimeStamp();
    }
  }
  let prevFrameTs = frames[0].getTimeStamp();
  // Keep completeness on the [0, 1] scale from the start: the loop below
  // reassigns these with `getProgress() / 100`, so the initial values must use
  // the same scale (previously they held the raw 0-100 value).
  let prevProgress = frames[0].getProgress() / 100;
  let prevPerceptualProgress = frames[0].getPerceptualProgress() / 100;
  /** @type {number|undefined} */
  let speedIndex = firstPaintTs - startTs;
  /** @type {number|undefined} */
  let perceptualSpeedIndex = firstPaintTs - startTs;
  frames.forEach(function (frame) {
    // Frames at or before first paint contribute nothing to the integral.
    if (frame.getTimeStamp() > firstPaintTs) {
      const elapsed = frame.getTimeStamp() - prevFrameTs;
      speedIndex += elapsed * (1 - prevProgress);
      perceptualSpeedIndex += elapsed * (1 - prevPerceptualProgress);
    }
    prevFrameTs = frame.getTimeStamp();
    prevProgress = frame.getProgress() / 100;
    prevPerceptualProgress = frame.getPerceptualProgress() / 100;
  });
  // Only report an index whose progress variant was actually computed.
  speedIndex = hasVisualProgress ? speedIndex : undefined;
  perceptualSpeedIndex = hasPerceptualProgress ? perceptualSpeedIndex : undefined;
  return {
    firstPaintTs,
    visuallyCompleteTs,
    speedIndex,
    perceptualSpeedIndex
  };
}
// calculateVisualProgress / calculatePerceptualProgress populate per-frame
// progress; calculateSpeedIndexes integrates it into the final indexes.
module.exports = {
  calculateFastModeAllowableChange,
  calculateFrameSimilarity,
  calculateVisualProgress,
  calculatePerceptualProgress,
  calculateSpeedIndexes
};

34
node_modules/speedline-core/package.json generated vendored Normal file
View File

@@ -0,0 +1,34 @@
{
"name": "speedline-core",
"version": "1.4.3",
"description": "Get the speed index from chrome dev tool timeline files",
"license": "MIT",
"repository": "https://github.com/paulirish/speedline",
"main": "lib",
"engines": {
"node": ">=8.0"
},
"scripts": {
"lint": "xo",
"unit": "ava",
"test": "yarn run lint && yarn run unit"
},
"types": "speedline.d.ts",
"keywords": [
"chrome",
"devtools",
"timeline",
"speed",
"index",
"performance"
],
"dependencies": {
"@types/node": "*",
"image-ssim": "^0.2.0",
"jpeg-js": "^0.4.1"
},
"devDependencies": {
"ava": "^0.19.1",
"xo": "^0.14.0"
}
}

60
node_modules/speedline-core/readme.md generated vendored Normal file
View File

@@ -0,0 +1,60 @@
# speedline-core
This is the core module for speedline, without any CLI dependencies. See [speedline](https://github.com/paulirish/speedline) for the CLI.
### Install
```bash
$ npm install speedline-core
```
### Usage
```js
const speedline = require('speedline-core');
speedline('./timeline').then(results => {
console.log('Speed Index value:', results.speedIndex);
});
```
### API
#### `speedline(timeline [, opts])`
* (string | object[]) `timeline`
* (object) `opts`
Returns a (Promise) resolving with an object containing:
* `beginning` (number) - Recording start timestamp
* `end` (number) - Recording end timestamp
* `speedIndex` (number) - speed index value.
* `perceptualSpeedIndex` (number) - perceptual speed index value.
* `first` (number) - duration before the first visual change in ms.
* `complete` (number) - duration before the last visual change in ms.
* `duration` (number) - timeline recording duration in ms.
* `frames` ([Frame](#frame)[]) - array of all the frames extracted from the timeline.
**`timeline` parameter**:
* `string` - the parameter represents the location of the file containing the timeline.
* `array` - the parameter represents the traceEvents content of the timeline file.
**`opts` parameter**:
* `timeOrigin`: Provides the baseline timeStamp, typically navigationStart. Must be a monotonic clock timestamp that matches the trace. E.g. `speedline('trace.json', {timeOrigin: 103205446186})`
* `fastMode`: If the elapsed time and difference in similarity between two screenshots are small, fastMode will skip decoding and evaluating the frames between them.
* `include`: Specifies which speed indexes to compute, can be one of `all|speedIndex|perceptualSpeedIndex`, defaults to `all`.
#### `Frame`
Object representing a single screenshot.
* `frame.getHistogram()`: (number[][]) - returns the frame histogram. Note that light pixel information is removed from the histogram, for better speed index calculation accuracy.
* `frame.getTimeStamp()`: (number) - return the frame timestamp.
* `frame.getImage()`: (Buffer) - return the frame content.
* `frame.getProgress()`: (number) - return the frame visual progress.
* `frame.getPerceptualProgress()`: (number) - return the frame perceptual visual progress.
## License
MIT © [Pierre-Marie Dartus](https://github.com/pmdartus)

84
node_modules/speedline-core/speedline.d.ts generated vendored Normal file
View File

@@ -0,0 +1,84 @@
/// <reference types="node" />
/**
 * Compute speed index metrics from a Chrome DevTools trace.
 * @param trace Trace file location or an array of traceEvents.
 */
declare function Speedline<I extends Speedline.IncludeType = 'all'>(trace: string|Speedline.TraceEvent[], opts: Speedline.Options<I>): Promise<Speedline.Output<I>>;
declare namespace Speedline {
  type IncludeType = 'all' | 'speedIndex' | 'perceptualSpeedIndex';
  interface Options<I extends IncludeType = 'all'> {
    /**
     * Provides the baseline timeStamp, typically navigationStart. Must be a monotonic clock
     * timestamp that matches the trace. E.g. `speedline('trace.json', {timeOrigin: 103205446186})`
     */
    timeOrigin?: number;
    /**
     * If the elapsed time and difference in similarity between two screenshots are small,
     * fastMode will skip decoding and evaluating the frames between them.
     */
    fastMode?: boolean;
    /**
     * Specifies which speed indexes to compute, can be one of
     * `all|speedIndex|perceptualSpeedIndex`. Defaults to `all`.
     */
    include?: I;
  }
  /** A single event from a Chrome DevTools trace. */
  interface TraceEvent {
    name: string;
    cat: string;
    args: {
      data?: {
        url?: string
      };
      snapshot?: string;
    };
    tid: number;
    ts: number;
    dur: number;
  }
  interface Output<I extends (IncludeType | 'unknown') = 'unknown'> {
    /** Recording start timestamp. */
    beginning: number;
    /** Recording end timestamp. */
    end: number;
    /** Duration before the first visual change, in ms. */
    first: number;
    /** Duration before the last visual change, in ms. */
    complete: number;
    /** Timeline recording duration, in ms. */
    duration: number;
    /** Array of all the frames extracted from the timeline. */
    frames: Array<{
      /**
       * @return The frame histogram. Note that light pixel information is removed
       * from the histogram for better speed index calculation accuracy.
       */
      getHistogram(): number[][];
      /** @return The frame timestamp. */
      getTimeStamp(): number;
      /** @return The frame content. */
      getImage(): Buffer;
      setProgress(progress: number, isInterpolated: boolean): void;
      setPerceptualProgress(progress: number, isInterpolated: boolean): void;
      /** @return The frame visual progress. */
      getProgress(): number;
      /** @return The frame perceptual visual progress. */
      getPerceptualProgress(): number;
      isProgressInterpolated(): boolean;
      isPerceptualProgressInterpolated(): boolean;
      getParsedImage(): {width: number, height: number, data: Buffer};
    }>;
    /** The Speed Index for the trace. Defined if opts.include was 'all' (default) or 'speedIndex'. */
    speedIndex: I extends 'all'|'speedIndex' ? number : (number | undefined);
    /** The Perceptual Speed Index for the trace. Defined if opts.include was 'all' (default) or 'perceptualSpeedIndex'. */
    perceptualSpeedIndex: I extends 'all'|'perceptualSpeedIndex' ? number : (number | undefined);
  }
}
export = Speedline;

3464
node_modules/speedline-core/yarn.lock generated vendored Normal file

File diff suppressed because it is too large Load Diff