// A script to process visually and auditorily scrambled YouTube videos into a human-understandable format, but slightly more optimized. Now also decodes hover previews. Includes an aggressive audio compressor to limit loud noises.
// ==UserScript==
// @name UnsafeYT Decoder
// @author ElectroKnight22
// @namespace unsafe-yt-decoder-namespace
// @version 0.9.5
// @match https://www.youtube.com/*
// @match https://m.youtube.com/*
// @match *://www.youtube-nocookie.com/*
// @exclude *://www.youtube.com/live_chat*
// @require https://update.greasyfork.org/scripts/549881/1689452/YouTube%20Helper%20API.js
// @grant none
// @run-at document-idle
// @inject-into page
// @license MIT
// @description A script to process visually and auditory scrambled YouTube videos into a human understandable format, but slightly more optimized. Now also decoding hover previews. Includes an aggressive audio compressor to limit loud noises.
// ==/UserScript==
/*jshint esversion: 11 */
(function () {
'use strict';
// GLSL sources for the overlay canvas that unscrambles the video frame.
const SCREEN_SHADERS = {
// Pass-through full-screen quad: forwards texture coordinates unchanged.
VERTEX: `#version 300 es
in vec2 a_position;
in vec2 a_texCoord;
out vec2 v_texCoord;
void main() {
gl_Position = vec4(a_position, 0.0, 1.0);
v_texCoord = a_texCoord;
}
`,
// Decoder: u_shuffle holds per-cell UV offsets on an 80x80 grid that are
// added to the lookup UV to undo the pixel shuffle; colors are then
// inverted (1.0 - rgb). Cell-boundary seams are detected via getNormal/
// getAxis and smoothed with a 16-tap blur using fixed sample offsets and
// Gaussian falloff weights (sigma 0.45).
FRAGMENT: `#version 300 es
precision highp float;
in vec2 v_texCoord;
out vec4 fragColor;
uniform sampler2D u_sampler;
uniform sampler2D u_shuffle;
const float PI = 3.14159265359;
vec4 getColor(vec2 uv) {
vec2 uv_clamped = clamp(uv, 0.0, 1.0);
vec2 shuffle_sample = texture(u_shuffle, uv_clamped).rg;
vec2 final_sample_pos = uv + shuffle_sample;
vec4 c = texture(u_sampler, final_sample_pos);
return vec4(1.0 - c.rgb, c.a);
}
vec2 getNormal(vec2 uv) {
vec2 offset = vec2(0.0065);
vec2 cell_center = round((uv + offset) * 80.0) / 80.0;
return (cell_center - (uv + offset)) * 80.0;
}
float getAxis(vec2 uv) {
vec2 normal = getNormal(uv);
float axisX = abs(normal.x) > 0.435 ? 1.0 : 0.0;
return abs(normal.y) > 0.4 ? 2.0 : axisX;
}
float getGrid(vec2 uv) {
return getAxis(uv) > 0.0 ? 1.0 : 0.0;
}
vec4 getGridFix(vec2 uv) {
vec2 normal = getNormal(uv);
vec4 baseColor = getColor(uv);
vec4 offsetColor = getColor(uv + normal * 0.002);
float gridAmount = getGrid(uv);
return mix(baseColor, offsetColor, gridAmount);
}
vec4 getSmoothed(vec2 uv, float power, float slice) {
vec4 totalColor = vec4(0.0);
float totalWeight = 0.0;
const float sigma = 0.45;
const int sampleCount = 16;
vec2 samples[16] = vec2[](
vec2(-.326, -.405), vec2(-.840, -.073), vec2(-.695, .457), vec2(-.203, .620),
vec2(.962, -.194), vec2(.473, -.480), vec2(.519, .767), vec2(.185, -.893),
vec2(.507, .064), vec2(.896, .412), vec2(-.321, .932), vec2(-.791, -.597),
vec2(.089, .290), vec2(.354, -.215), vec2(-.825, .223), vec2(-.913, -.281)
);
for (int i = 0; i < sampleCount; i++) {
vec2 offset = samples[i] * power;
float dist = length(samples[i]);
float weight = exp(-(dist * dist) / (2.0 * sigma * sigma));
totalColor += getGridFix(uv + offset) * weight;
totalWeight += weight;
}
return totalColor / totalWeight;
}
void main() {
vec2 uv = vec2(v_texCoord.x, 1.0 - v_texCoord.y);
float axis = getAxis(uv);
float grid = axis > 0.0 ? 1.0 : 0.0;
float s[3] = float[3](0.0, 0.0, PI);
vec4 baseColor = getGridFix(uv);
vec4 smoothedColor = getSmoothed(uv, 0.0008, s[int(axis)]);
vec4 finalColor = mix(baseColor, smoothedColor, grid);
fragColor = finalColor;
}
`,
};
// Template for the audio slice of appState; reset by shallow-cloning this.
const initialAudioState = Object.freeze({
context: null, // shared AudioContext (created lazily in applyEffects)
sourceNode: null, // MediaElementAudioSourceNode; created at most once per context
mainAudioNode: null, // For unprocessed audio
bypassAudioNode: null, // For processed audio
gainNode: null, // pre-filter gain inside the processed chain
compressor: null, // DynamicsCompressorNode limiting loud peaks
outputGainNode: null, // post-compressor makeup gain
notchFilters: [], // bank of BiquadFilterNode 'notch' filters (see setupAudio)
});
// Template for the full script state; appState is reset by re-cloning this.
const initialAppState = Object.freeze({
token: '', // token currently driving the unshuffle map
isRendering: false, // true while the frame loop should keep drawing
canvas: null, // overlay <canvas> that decoded frames are drawn to
gl: null, // WebGL(2) context of that canvas
renderLoop: () => {}, // per-frame draw callback produced by setupWebGL
audio: { ...initialAudioState },
renderFrameId: null, // requestAnimationFrame id of the fallback frame loop
originalContainerStyle: null, // player container inline styles to restore on destroy
resizeObserver: null, // keeps canvas size synced to the video element
listenerController: null, // AbortController for the play/pause audio listeners
videoElement: null, // current <video> element from the helper API
playerContainer: null, // player container the canvas is appended to
});
// Mutable singleton state for the whole script; reset by cloning the templates above.
let appState = { ...initialAppState };
// Re-entrancy guard: true while applyEffects/removeEffects is executing.
let isApplyingEffects = false;
// Cached references to the injected control-bar elements.
const UI_CACHE = {
toggle: null,
manual: null,
tokenIndicator: null,
};
// Lazily-created Trusted Types policy (see createTrustedHTML).
let userscriptHTMLPolicy = null;
/**
 * Wraps an HTML string as TrustedHTML when the Trusted Types API exists, so
 * innerHTML assignments pass pages enforcing a Trusted Types CSP.
 * @param {string} htmlString - Markup to wrap.
 * @returns {string|TrustedHTML} Policy-wrapped markup, or the input unchanged.
 */
function createTrustedHTML(htmlString) {
  const trustedTypes = window.trustedTypes;
  // No Trusted Types support: the browser accepts the raw string directly.
  if (!trustedTypes || !trustedTypes.createPolicy) return htmlString;
  // Create a single pass-through policy on first use and reuse it after.
  if (!userscriptHTMLPolicy) {
    userscriptHTMLPolicy = trustedTypes.createPolicy('userscript-html-policy', { createHTML: (s) => s });
  }
  return userscriptHTMLPolicy.createHTML(htmlString);
}
/**
 * Maps a string to a deterministic pseudo-random value in [0, 1).
 * Polynomial rolling hash over UTF-16 code units:
 * acc = (acc * prime + codeUnit) mod floor(modulus), normalized by the modulus.
 * @param {string} inputString - Seed text.
 * @param {number} [prime=31] - Multiplier of the rolling hash.
 * @param {number} [modulus=2**32] - Range of the intermediate integer hash.
 * @returns {number} Normalized hash in [0, 1).
 */
function getDeterministicHash(inputString, prime = 31, modulus = Math.pow(2, 32)) {
  const mod = Math.floor(modulus);
  let acc = 0;
  for (let i = 0; i < inputString.length; i++) {
    acc = (acc * prime + inputString.charCodeAt(i)) % mod;
    // Guard against a negative remainder (possible with unusual moduli).
    if (acc < 0) acc += mod;
  }
  return acc / mod;
}
/**
 * Derives the inverse ("unshuffle") offset map for a token-seeded pixel
 * permutation over a width x height grid.
 *
 * The permutation is defined by ranking sine samples taken along a
 * token-derived angular walk. Each cell's entry stores the (dx, dy)
 * displacement, in fractions of the grid size, from its original position
 * to its shuffled position — the value the fragment shader adds to a UV to
 * fetch the scrambled pixel.
 * @param {string} seedToken - Non-empty seed string.
 * @param {number} width - Grid width in cells (> 0).
 * @param {number} height - Grid height in cells (> 0).
 * @returns {Float32Array} Interleaved [dx, dy] pairs, length width*height*2.
 * @throws {Error} On a falsy token or non-positive dimensions.
 */
function _generateUnshuffleOffsetMapFloat32Array(seedToken, width, height) {
  if (!seedToken || width <= 0 || height <= 0) {
    throw new Error('Invalid params for unshuffle map.');
  }
  const totalPixels = width * height;
  // Two independent hashes: one picks the starting phase, one the step size.
  const startAngle = getDeterministicHash(seedToken, 31, 2 ** 32 - 1) * Math.PI * 2.0;
  const angleIncrement = (getDeterministicHash(seedToken + '_step', 37, 2 ** 32 - 2) * Math.PI) / Math.max(width, height);
  const sineAt = (index) => Math.sin(startAngle + index * angleIncrement);
  // Sorting indices by their sine value yields the shuffle order; the rank
  // of each index inside that order is its shuffled destination.
  const sortedIndices = Array.from({ length: totalPixels }, (_, index) => index);
  sortedIndices.sort((a, b) => sineAt(a) - sineAt(b));
  const permutationArray = new Array(totalPixels);
  sortedIndices.forEach((originalIndex, rank) => {
    permutationArray[originalIndex] = rank;
  });
  const offsetMapFloats = new Float32Array(totalPixels * 2);
  for (let linearIndex = 0; linearIndex < totalPixels; linearIndex++) {
    const originalX = linearIndex % width;
    const originalY = Math.floor(linearIndex / width);
    const shuffledLinearIndex = permutationArray[linearIndex];
    // Normalized displacement from the original cell to its shuffled cell.
    offsetMapFloats[linearIndex * 2] = ((shuffledLinearIndex % width) - originalX) / width;
    offsetMapFloats[linearIndex * 2 + 1] = (Math.floor(shuffledLinearIndex / width) - originalY) / height;
  }
  return offsetMapFloats;
}
/**
 * Pulls a decoding token out of free-form text (typically a video
 * description). Only the first line is inspected; it must start with
 * "token:" or "key:" (case-insensitive).
 * @param {string|null|undefined} text - Raw description text.
 * @returns {string} The extracted token, or '' when none is found or on error.
 */
function extractTokenFromText(text) {
  try {
    if (!text) return '';
    const firstLine = text.trim().split(/\r?\n/)[0] || '';
    const lowered = firstLine.toLowerCase();
    // BUG FIX: the previous forEach used `return` intending to stop the
    // iteration, but forEach ignores callback return values. A plain loop
    // with an early return expresses the intended short-circuit.
    for (const marker of ['token:', 'key:']) {
      if (lowered.startsWith(marker)) {
        return firstLine.substring(marker.length).trim();
      }
    }
    return '';
  } catch (error) {
    console.error('[UnsafeYT] Token extraction error:', error);
    return '';
  }
}
/**
 * Injects the control-bar CSS into <head> exactly once per page; subsequent
 * calls detect the existing <style> element and do nothing.
 */
function injectStyles() {
  const alreadyInjected = document.getElementById('unsafeyt-styles');
  if (alreadyInjected) return;
  const css = ` #unsafeyt-controls { display: flex; gap: 8px; align-items: center; margin-left: 12px; } .unsafeyt-button { background: transparent; color: white; padding: 6px 8px; border-radius: 6px; cursor: pointer; font-size: 12px; font-weight: 600; outline: none; transition: box-shadow .2s, border-color .2s; } #unsafeyt-toggle { border: 2px solid rgba(200,0,0,0.95); } #unsafeyt-toggle.active { border-color: rgba(0,200,0,0.95); box-shadow: 0 0 8px rgba(0,200,0,0.25); } #unsafeyt-manual { border: 1px solid rgba(255,255,255,0.2); } #unsafeyt-token-indicator { width: 10px; height: 10px; border-radius: 50%; margin-left: 6px; background: transparent; } #unsafeyt-token-indicator.present { background: limegreen; } `;
  const sheet = document.createElement('style');
  sheet.id = 'unsafeyt-styles';
  // innerHTML (not textContent) so the Trusted Types wrapper applies.
  sheet.innerHTML = createTrustedHTML(css);
  document.head.appendChild(sheet);
}
/**
 * Adds the "Toggle Effects" / "Enter Token" controls plus a token indicator
 * dot to the watch page's top-level button bar. Idempotent: bails out when
 * not on a watch page or when the controls already exist.
 */
function createControlButtons() {
  try {
    const onWatchPage = window.location.pathname === '/watch';
    if (!onWatchPage || document.querySelector('#unsafeyt-controls')) return;
    injectStyles();
    const bar = document.querySelector('#top-level-buttons-computed');
    if (!bar) throw new Error('Top-level buttons not found.');
    // Local factory so both buttons share identical wiring.
    const makeButton = (id, label, onClick) => {
      const btn = document.createElement('button');
      btn.id = id;
      btn.type = 'button';
      btn.className = 'unsafeyt-button';
      btn.textContent = label;
      btn.addEventListener('click', onClick);
      return btn;
    };
    const toggleButton = makeButton('unsafeyt-toggle', 'Toggle Effects', async () => {
      appState.isRendering ? await removeEffects(false) : await applyEffects(appState.token);
    });
    const manualButton = makeButton('unsafeyt-manual', 'Enter Token', async () => {
      const userInput = prompt("Enter token (first line of description can also be 'token:...' or 'key:...'):")?.trim();
      if (!userInput) return;
      try {
        await applyEffects(userInput);
      } catch (error) {
        console.error('[UnsafeYT] Manual token apply failed:', error);
      }
    });
    const tokenIndicator = document.createElement('div');
    tokenIndicator.id = 'unsafeyt-token-indicator';
    const container = document.createElement('div');
    container.id = 'unsafeyt-controls';
    container.append(toggleButton, manualButton, tokenIndicator);
    UI_CACHE.toggle = toggleButton;
    UI_CACHE.manual = manualButton;
    UI_CACHE.tokenIndicator = tokenIndicator;
    // Place the controls at the very start of the button bar.
    bar.insertBefore(container, bar.firstChild);
    updateUIState();
  } catch (error) {
    console.error('[UnsafeYT] Error creating control buttons:', error);
  }
}
/**
 * Syncs the cached controls with the current app state: the toggle button
 * gets the 'active' class while rendering, and the indicator dot gets
 * 'present' while a token is stored.
 */
function updateUIState() {
  const { toggle, tokenIndicator } = UI_CACHE;
  if (toggle) toggle.classList.toggle('active', appState.isRendering);
  if (tokenIndicator) tokenIndicator.classList.toggle('present', Boolean(appState.token));
}
/**
 * Compiles a single GLSL shader, logging and returning null on any failure.
 * @param {WebGLRenderingContext|WebGL2RenderingContext} gl - GL context.
 * @param {number} type - gl.VERTEX_SHADER or gl.FRAGMENT_SHADER.
 * @param {string} src - GLSL source text.
 * @returns {WebGLShader|null} The compiled shader, or null on failure.
 */
function compileShader(gl, type, src) {
  try {
    if (!gl) return null;
    const shader = gl.createShader(type);
    if (!shader) throw new Error('Failed to create shader.');
    gl.shaderSource(shader, src);
    gl.compileShader(shader);
    const compiledOk = gl.getShaderParameter(shader, gl.COMPILE_STATUS);
    if (compiledOk) return shader;
    // Capture the driver's log before releasing the failed shader.
    const infoLog = gl.getShaderInfoLog(shader);
    gl.deleteShader(shader);
    throw new Error(infoLog);
  } catch (error) {
    console.error('[UnsafeYT] Shader compile error:', error);
    return null;
  }
}
/**
 * Compiles and links a vertex + fragment shader pair into a program and
 * makes it current (gl.useProgram). The individual shader objects are
 * always released afterwards — once linked, the program keeps the compiled
 * code alive. Logs and returns null on any failure.
 * @param {WebGLRenderingContext|WebGL2RenderingContext} gl - GL context.
 * @param {string} vertexShaderSource - Vertex stage GLSL.
 * @param {string} fragmentShaderSource - Fragment stage GLSL.
 * @returns {WebGLProgram|null} The linked program, or null on failure.
 */
function createProgram(gl, vertexShaderSource, fragmentShaderSource) {
  let vertexShader = null;
  let fragmentShader = null;
  try {
    if (!gl) return null;
    vertexShader = compileShader(gl, gl.VERTEX_SHADER, vertexShaderSource);
    fragmentShader = compileShader(gl, gl.FRAGMENT_SHADER, fragmentShaderSource);
    if (vertexShader === null || fragmentShader === null) throw new Error('Shader creation failed.');
    const program = gl.createProgram();
    gl.attachShader(program, vertexShader);
    gl.attachShader(program, fragmentShader);
    gl.linkProgram(program);
    const linkedOk = gl.getProgramParameter(program, gl.LINK_STATUS);
    if (!linkedOk) {
      const infoLog = gl.getProgramInfoLog(program);
      try {
        gl.deleteProgram(program);
      } catch (error) {
        console.warn('[UnsafeYT] Failed to delete program:', error);
      }
      throw new Error(`Program link error: ${infoLog}`);
    }
    gl.useProgram(program);
    return program;
  } catch (error) {
    console.error('[UnsafeYT] Program creation error:', error);
    return null;
  } finally {
    // Shaders are no longer needed whether linking succeeded or failed.
    try {
      if (vertexShader) gl.deleteShader(vertexShader);
      if (fragmentShader) gl.deleteShader(fragmentShader);
    } catch (error) {
      console.warn('[UnsafeYT] Failed to delete shader post-link:', error);
    }
  }
}
/**
 * Builds the WebGL decode pipeline for the current video and token.
 * Compiles the screen shaders, uploads the token-derived 80x80 unshuffle
 * offset map as a float texture (unit 1), and returns a zero-argument
 * render callback that re-uploads the current video frame (unit 0) and
 * draws the decoded full-screen quad.
 * @param {WebGLRenderingContext|WebGL2RenderingContext} gl - Canvas context.
 * @param {HTMLVideoElement} videoElement - Source of frame pixels.
 * @param {string} token - Seed for the deterministic unshuffle map.
 * @returns {Function} Render callback for the frame loop.
 * @throws When shaders, the offset map, or float-texture support are unavailable.
 */
function setupWebGL(gl, videoElement, token) {
let oesTextureFloatExt = null;
// WebGL1 only accepts FLOAT texture uploads through this extension.
if (gl instanceof WebGLRenderingContext) {
oesTextureFloatExt = gl.getExtension('OES_texture_float');
}
try {
const program = createProgram(gl, SCREEN_SHADERS.VERTEX, SCREEN_SHADERS.FRAGMENT);
if (!program) {
throw new Error('Program creation failed');
}
const positionLocation = gl.getAttribLocation(program, 'a_position');
const texCoordLocation = gl.getAttribLocation(program, 'a_texCoord');
const videoSamplerLocation = gl.getUniformLocation(program, 'u_sampler');
const shuffleSamplerLocation = gl.getUniformLocation(program, 'u_shuffle');
// Two triangles covering clip space; each vertex is [x, y, u, v].
const quadVerts = new Float32Array([-1, -1, 0, 0, 1, -1, 1, 0, -1, 1, 0, 1, -1, 1, 0, 1, 1, -1, 1, 0, 1, 1, 1, 1]);
const vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, quadVerts, gl.STATIC_DRAW);
gl.enableVertexAttribArray(positionLocation);
// Stride 16 bytes = 4 floats per vertex; positions at offset 0, UVs at 8.
gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 16, 0);
gl.enableVertexAttribArray(texCoordLocation);
gl.vertexAttribPointer(texCoordLocation, 2, gl.FLOAT, false, 16, 8);
const videoTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, videoTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
let unshuffleMapFloats = null;
try {
unshuffleMapFloats = _generateUnshuffleOffsetMapFloat32Array(token, 80, 80);
} catch (error) {
console.error('[UnsafeYT] Failed to generate unshuffle map:', error);
throw error;
}
const shuffleTexture = gl.createTexture();
// The offset map lives on texture unit 1. NEAREST filtering keeps each
// cell's offset exact; interpolating neighboring offsets would corrupt
// the shuffled-pixel lookup.
gl.activeTexture(gl.TEXTURE1);
gl.bindTexture(gl.TEXTURE_2D, shuffleTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
if (gl instanceof WebGL2RenderingContext) {
// WebGL2: prefer the two-channel RG32F format; on failure fall back to
// RGBA32F with the offset pairs expanded into R/G of each texel.
try {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RG32F, 80, 80, 0, gl.RG, gl.FLOAT, unshuffleMapFloats);
} catch (error) {
console.warn('[UnsafeYT] WebGL2 RG32F texture failed, falling back to RGBA32F:', error);
try {
const rgbaFloatArray = new Float32Array(80 * 80 * 4);
for (let i = 0; i < unshuffleMapFloats.length / 2; i++) {
rgbaFloatArray[i * 4] = unshuffleMapFloats[i * 2];
rgbaFloatArray[i * 4 + 1] = unshuffleMapFloats[i * 2 + 1];
}
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA32F, 80, 80, 0, gl.RGBA, gl.FLOAT, rgbaFloatArray);
} catch (error) {
console.error('[UnsafeYT] WebGL2 RGBA32F texture failed:', error);
throw error;
}
}
} else if (oesTextureFloatExt) {
// WebGL1 + OES_texture_float: unsized RGBA float upload.
try {
const rgbaFloatArray = new Float32Array(80 * 80 * 4);
for (let i = 0; i < unshuffleMapFloats.length / 2; i++) {
rgbaFloatArray[i * 4] = unshuffleMapFloats[i * 2];
rgbaFloatArray[i * 4 + 1] = unshuffleMapFloats[i * 2 + 1];
}
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 80, 80, 0, gl.RGBA, gl.FLOAT, rgbaFloatArray);
} catch (error) {
console.error('[UnsafeYT] WebGL1 RGBA texture failed:', error);
throw error;
}
} else {
throw new Error('No float texture support.');
}
gl.clearColor(0, 0, 0, 1);
// Per-frame draw, invoked by startVideoFrameLoop while rendering is on.
const render = () => {
if (!appState.isRendering || !gl || !videoElement || !appState.canvas) return;
try {
// Only upload when the element has a decodable frame available.
if (videoElement.readyState >= videoElement.HAVE_CURRENT_DATA) {
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, videoTexture);
try {
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, videoElement);
} catch (error) {
// Direct upload from the element failed; allocate an empty texture
// of the matching size so the draw can still proceed this frame.
try {
gl.texImage2D(
gl.TEXTURE_2D,
0,
gl.RGBA,
videoElement.videoWidth,
videoElement.videoHeight,
0,
gl.RGBA,
gl.UNSIGNED_BYTE,
null,
);
} catch (error) {
console.warn('[UnsafeYT] Failed to update video texture:', error);
}
}
gl.uniform1i(videoSamplerLocation, 0);
gl.uniform1i(shuffleSamplerLocation, 1);
gl.clear(gl.COLOR_BUFFER_BIT);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
} catch (error) {
// Unexpected failure: tear the whole pipeline down rather than leaving
// a broken overlay stuck on top of the video.
console.error('[UnsafeYT] WebGL render loop failed:', error);
removeEffects(true).catch((error) =>
console.error('[UnsafeYT] Failed to remove effects after render loop error:', error),
);
}
};
return render;
} catch (error) {
console.error('[UnsafeYT] WebGL setup failed:', error);
throw error;
}
}
/**
 * Wires the Web Audio processing graph for the video's audio track.
 * Two parallel paths leave the media-element source:
 *   mainAudioNode   -> destination                      (unprocessed; gain 0 here)
 *   bypassAudioNode -> splitter -> L/R gains -> merger -> gain ->
 *     notch filters -> compressor -> output gain -> destination (processed)
 * Also installs play/pause listeners (tied to an AbortController) that
 * suspend/resume the AudioContext with video playback.
 * @param {AudioContext} audioContext - Context to build the graph in.
 * @param {HTMLVideoElement} videoElement - Element whose audio is processed.
 * @returns {Object} Created nodes plus the listenerController, or {} on failure.
 */
function setupAudio(audioContext, videoElement) {
try {
if (!audioContext || !videoElement) return {};
// A media element can only be wrapped once per context; reuse the node.
if (!appState.audio.sourceNode) {
try {
appState.audio.sourceNode = audioContext.createMediaElementSource(videoElement);
} catch (error) {
console.warn('[UnsafeYT] Could not create media element source:', error);
appState.audio.sourceNode = null;
}
}
const mainAudioNode = audioContext.createGain();
const bypassAudioNode = audioContext.createGain();
// While effects are active the untouched path is muted and the processed
// path runs at unity; removeEffects/applyEffects flip these gains.
mainAudioNode.gain.value = 0.0; // For unprocessed audio
bypassAudioNode.gain.value = 1.0; // For processed audio
// Split stereo, attenuate each channel, and feed both into the same
// input of a single-channel merger — i.e. a mono downmix at 0.25 each.
const splitter = audioContext.createChannelSplitter(2);
const leftGain = audioContext.createGain();
const rightGain = audioContext.createGain();
const merger = audioContext.createChannelMerger(1);
leftGain.gain.value = 0.25;
rightGain.gain.value = 0.25;
const gainNode = audioContext.createGain();
gainNode.gain.value = 1.0;
// Aggressive, limiter-like compression: very low threshold, 15:1 ratio.
const compressor = audioContext.createDynamicsCompressor();
compressor.threshold.value = -72;
compressor.knee.value = 35;
compressor.ratio.value = 15;
compressor.attack.value = 0.003;
compressor.release.value = 0.25;
// Makeup gain after the downmix attenuation and compression.
const outputGainNode = audioContext.createGain();
outputGainNode.gain.value = 4.0;
// Frequencies to notch out. NOTE(review): the Web Audio spec says the
// `gain` AudioParam is ignored for 'notch' biquads, so `g` likely has no
// audible effect — confirm before relying on it.
const filterConfigs = [
{ f: 200, q: 3, g: 1 },
{ f: 440, q: 2, g: 1 },
{ f: 6600, q: 1, g: 0 },
{ f: 15600, q: 1, g: 0 },
{ f: 5000, q: 20, g: 1 },
{ f: 6000, q: 20, g: 1 },
{ f: 6300, q: 5, g: 1 },
{ f: 8000, q: 40, g: 1 },
{ f: 10000, q: 40, g: 1 },
{ f: 12500, q: 40, g: 1 },
{ f: 14000, q: 40, g: 1 },
{ f: 15000, q: 40, g: 1 },
{ f: 15500, q: 1, g: 0 },
{ f: 15900, q: 1, g: 0 },
{ f: 16000, q: 40, g: 1 },
];
const notchFilters = filterConfigs.map((config) => {
const filter = audioContext.createBiquadFilter();
filter.type = 'notch';
filter.frequency.value = config.f;
filter.Q.value = config.q * 3.5; // scale up Q relative to the configs
filter.gain.value = config.g;
return filter;
});
if (appState.audio.sourceNode) {
// Unprocessed path (muted by default).
appState.audio.sourceNode.connect(mainAudioNode);
mainAudioNode.connect(audioContext.destination);
// Processed path: downmix, filter bank, compressor, makeup gain.
appState.audio.sourceNode.connect(bypassAudioNode);
bypassAudioNode.connect(splitter);
splitter.connect(leftGain, 0);
splitter.connect(rightGain, 1);
leftGain.connect(merger, 0, 0);
rightGain.connect(merger, 0, 0);
// Chain the remaining nodes in series through to the speakers.
const audioChain = [merger, gainNode, ...notchFilters, compressor, outputGainNode, audioContext.destination];
audioChain.reduce((prev, next) => prev.connect(next));
}
// Mirror playback state onto the context so processing does not run while
// paused; the AbortController lets removeEffects detach both listeners.
const listenerController = new AbortController();
const { signal } = listenerController;
const handleAudioState = async () => {
if (!audioContext || audioContext.state === 'closed') return;
if (videoElement.paused) {
if (audioContext.state === 'running')
audioContext.suspend().catch((error) => console.warn('[UnsafeYT] Audio context suspend error:', error));
} else {
if (audioContext.state === 'suspended')
audioContext.resume().catch((error) => console.warn('[UnsafeYT] Audio context resume error:', error));
}
};
videoElement.addEventListener('play', handleAudioState, { signal });
videoElement.addEventListener('pause', handleAudioState, { signal });
// Sync immediately in case the video is already playing.
if (!videoElement.paused) handleAudioState();
return {
mainAudioNode,
bypassAudioNode,
gainNode,
compressor,
outputGainNode,
notchFilters,
listenerController,
};
} catch (error) {
console.error('[UnsafeYT] Audio graph setup failed:', error);
return {};
}
}
/**
 * Drives the per-frame render callback. Prefers requestVideoFrameCallback
 * (fires per presented video frame) and falls back to requestAnimationFrame
 * on browsers without it. Both loops stop on their own once
 * appState.isRendering turns false.
 * @param {HTMLVideoElement} videoElement - Element whose frames drive the loop.
 * @param {Function} renderCallback - Draws one frame to the overlay canvas.
 */
function startVideoFrameLoop(videoElement, renderCallback) {
  const hasVideoFrameCallback = typeof videoElement.requestVideoFrameCallback === 'function';
  if (!hasVideoFrameCallback) {
    console.warn('[UnsafeYT] requestVideoFrameCallback not supported. Falling back to requestAnimationFrame.');
    const rafLoop = () => {
      if (!appState.isRendering) {
        appState.renderFrameId = null;
        return;
      }
      renderCallback();
      // Store the id so removeEffects can cancel the pending frame.
      appState.renderFrameId = requestAnimationFrame(rafLoop);
    };
    appState.renderFrameId = requestAnimationFrame(rafLoop);
    return;
  }
  const videoFrameCallback = () => {
    if (!appState.isRendering) return;
    renderCallback();
    // Re-arm on the element currently tracked in appState (it may have
    // been swapped by a navigation since the loop started).
    if (!appState.videoElement) return;
    try {
      appState.videoElement.requestVideoFrameCallback(videoFrameCallback);
    } catch (error) {
      console.warn('[UnsafeYT] Failed to request next video frame callback:', error);
    }
  };
  videoElement.requestVideoFrameCallback(videoFrameCallback);
}
/**
 * Stops the decode pipeline in one of two modes.
 * forceDestroy=false — soft pause: hides the canvas, shows the raw video,
 * and cross-fades the audio back to the (quieter) unprocessed track while
 * keeping GL/audio resources alive for a quick resume.
 * forceDestroy=true — full teardown: listeners, canvas, resize observer,
 * GL context, container styling, audio graph, and the stored token.
 * No-op while another apply/remove holds the re-entrancy guard (callers
 * inside applyEffects must release the guard first).
 * @param {boolean} [forceDestroy=false] - Destroy instead of pausing.
 */
async function removeEffects(forceDestroy = false) {
if (isApplyingEffects) return;
if (!forceDestroy && !appState.isRendering) return;
isApplyingEffects = true;
try {
appState.isRendering = false;
// Cancel the rAF fallback loop if active; the rVFC loop stops by itself
// once isRendering is false.
if (appState.renderFrameId !== null) {
cancelAnimationFrame(appState.renderFrameId);
appState.renderFrameId = null;
}
if (!forceDestroy) {
// Soft pause: hide the overlay, restore the video, swap audio paths.
if (appState.canvas) appState.canvas.style.display = 'none';
if (appState.videoElement) appState.videoElement.style.opacity = '1';
// Partially restore the unprocessed track's volume; mute the chain.
if (appState.audio.mainAudioNode) appState.audio.mainAudioNode.gain.value = 0.2;
if (appState.audio.bypassAudioNode) appState.audio.bypassAudioNode.gain.value = 0.0;
console.log('[UnsafeYT] Paused applied effects.');
updateUIState();
return;
}
console.log('[UnsafeYT] Destroying applied effects.');
// Detach the play/pause listeners registered in setupAudio.
if (appState.listenerController) {
appState.listenerController.abort();
appState.listenerController = null;
}
if (appState.canvas) {
try {
appState.canvas.remove();
} catch (error) {
console.warn('[UnsafeYT] Canvas remove error:', error);
}
appState.canvas = null;
}
if (appState.resizeObserver) {
appState.resizeObserver.disconnect();
appState.resizeObserver = null;
}
// Explicitly release the GL context instead of waiting for GC.
if (appState.gl) {
try {
const webGLContentExtension = appState.gl.getExtension('WEBGL_lose_context');
if (webGLContentExtension) webGLContentExtension.loseContext();
} catch (error) {
console.warn('[UnsafeYT] GL context lose error:', error);
}
appState.gl = null;
}
// Restore the player container's original inline position/height.
const container = appState.playerContainer;
if (container && appState.originalContainerStyle) {
try {
Object.assign(container.style, appState.originalContainerStyle);
} catch (error) {
console.warn('[UnsafeYT] Container style reset error:', error);
}
appState.originalContainerStyle = null;
}
if (appState.audio.sourceNode) {
try {
appState.audio.sourceNode.disconnect();
} catch (error) {
console.warn('[UnsafeYT] Source node disconnect error:', error);
}
}
if (appState.audio.context && appState.audio.context.state !== 'closed') {
appState.audio.context.close().catch((error) => console.warn('[UnsafeYT] Audio context close error:', error));
}
// Reset the audio slice and remaining bookkeeping to a clean state.
appState.audio = { ...initialAudioState };
const video = appState.videoElement;
if (video) {
video.style.opacity = '1';
}
appState.renderLoop = () => {};
appState.token = '';
updateUIState();
} finally {
isApplyingEffects = false;
}
}
/**
 * Applies (or resumes) the decode pipeline for the given token.
 *
 * Fast path: when a live pipeline already exists for this exact token it is
 * only re-shown (canvas displayed, audio cross-faded, frame loop restarted).
 * Otherwise any previous pipeline is destroyed and a fresh canvas, WebGL
 * program, resize observer, and audio graph are built.
 *
 * Anything that is not a string of at least 3 characters tears down any
 * existing pipeline instead. No-op while another apply/remove is running.
 * @param {string} seedToken - Token that seeds the unshuffle map.
 */
async function applyEffects(seedToken) {
  if (isApplyingEffects) return;
  if (typeof seedToken !== 'string' || seedToken.length < 3) {
    if (appState.gl) await removeEffects(true);
    return;
  }
  const videoElement = appState.videoElement;
  const playerContainer = appState.playerContainer;
  if (!videoElement || !playerContainer) return;
  isApplyingEffects = true;
  videoElement.currentTime += 0.001; // Apply seek to force refresh stale video buffer.
  try {
    // Fast resume: the GL pipeline for this token is still alive.
    if (appState.gl && appState.canvas && appState.token === seedToken) {
      console.log('[UnsafeYT] Resuming applied effects.');
      videoElement.style.opacity = '0';
      appState.canvas.style.display = 'block';
      // Mute the untouched track; run the processed chain at unity gain.
      if (appState.audio.mainAudioNode) appState.audio.mainAudioNode.gain.value = 0.0;
      if (appState.audio.bypassAudioNode) appState.audio.bypassAudioNode.gain.value = 1.0;
      if (appState.audio.context?.state === 'suspended' && !videoElement.paused) {
        appState.audio.context.resume().catch((error) => console.warn('[UnsafeYT] Audio context resume error:', error));
      }
      appState.isRendering = true;
      startVideoFrameLoop(videoElement, appState.renderLoop);
      updateUIState();
      return;
    }
    // BUG FIX: removeEffects() is a no-op while isApplyingEffects is true,
    // so the previous pipeline (canvas, GL context, audio graph) was never
    // destroyed before a rebuild and leaked. Release the guard around it.
    isApplyingEffects = false;
    await removeEffects(true);
    isApplyingEffects = true;
    console.log(`[UnsafeYT] Applying effects with token: "${seedToken}"`);
    // Hide the scrambled video; the overlay canvas shows decoded frames.
    videoElement.style.opacity = '0';
    // Required so WebGL can read video pixels without tainting the canvas.
    videoElement.crossOrigin = 'anonymous';
    appState.canvas = document.createElement('canvas');
    appState.canvas.id = 'unsafeyt-glcanvas';
    Object.assign(appState.canvas.style, {
      position: 'absolute',
      top: `${window.youtubeHelperApi.page.isMobile ? '50%' : '0%'}`,
      left: '50%',
      transform: 'translateY(0%) translateX(-50%)',
      pointerEvents: 'none',
      zIndex: 12,
      touchAction: 'none',
      display: 'block',
    });
    // Remember the container's inline styles once so destroy can restore them.
    if (!appState.originalContainerStyle)
      appState.originalContainerStyle = {
        position: playerContainer.style.position,
        height: playerContainer.style.height,
      };
    Object.assign(playerContainer.style, { position: 'relative', height: '100%' });
    playerContainer.appendChild(appState.canvas);
    appState.gl = appState.canvas.getContext('webgl2', { alpha: false }) || appState.canvas.getContext('webgl', { alpha: false });
    if (!appState.gl) {
      throw new Error('Failed to get WebGL context.');
    }
    // Keep the canvas backing store and GL viewport in sync with the video.
    const resizeCallback = () => {
      if (!appState.canvas || !videoElement) return;
      appState.canvas.width = videoElement.offsetWidth || videoElement.videoWidth || 640;
      appState.canvas.height = videoElement.offsetHeight || videoElement.videoHeight || 360;
      if (appState.gl) {
        try {
          appState.gl.viewport(0, 0, appState.gl.drawingBufferWidth, appState.gl.drawingBufferHeight);
        } catch (error) {
          console.warn('[UnsafeYT] GL viewport error:', error);
        }
      }
    };
    appState.resizeObserver = new ResizeObserver(resizeCallback);
    appState.resizeObserver.observe(videoElement);
    resizeCallback();
    appState.renderLoop = setupWebGL(appState.gl, videoElement, seedToken);
    appState.token = seedToken;
    const AudioCtx = window.AudioContext || window.webkitAudioContext;
    if (AudioCtx) {
      // The teardown above closed and cleared any previous context.
      if (!appState.audio.context) appState.audio.context = new AudioCtx();
      const audioNodes = setupAudio(appState.audio.context, videoElement);
      appState.audio = { ...appState.audio, ...audioNodes };
    }
    appState.isRendering = true;
    startVideoFrameLoop(videoElement, appState.renderLoop);
    updateUIState();
    console.log('[UnsafeYT] Effects applied.');
  } catch (error) {
    console.error('[UnsafeYT] Failed to apply effects:', error);
    // Same guard problem as above: release it so the rollback really runs.
    isApplyingEffects = false;
    await removeEffects(true);
  } finally {
    isApplyingEffects = false;
  }
}
/**
 * Reacts to a new video payload from the helper API: applies effects when
 * the description's first line carries a token, destroys them otherwise.
 * @param {CustomEvent} event - Helper-API event carrying video metadata.
 */
async function processVideo(event) {
  try {
    const newToken = extractTokenFromText(event.detail.video.rawDescription);
    if (!newToken) {
      // Token-less video: tear down any pipeline left from a prior video.
      if (appState.gl) {
        console.log('[UnsafeYT] Video has no token, destroying effects.');
        await removeEffects(true);
      }
      return;
    }
    if (newToken !== appState.token) {
      console.log('[UnsafeYT] New video with token detected, re-initializing.');
      await applyEffects(newToken);
      return;
    }
    // Same token but not currently rendering: resume the existing pipeline.
    if (!appState.isRendering && appState.gl) {
      console.log('[UnsafeYT] Resuming effects for same video on page load.');
      await applyEffects(newToken);
    }
  } catch (error) {
    console.error('[UnsafeYT] Error in processVideo:', error);
  }
}
/**
 * Entry point for helper-API updates: caches the current player elements,
 * (re)creates the UI controls, and kicks off token processing.
 * @param {CustomEvent} event - Helper-API event with player details.
 */
function _handleApiUpdate(event) {
  const { playerObject, videoElement } = event.detail.player;
  appState.playerContainer = playerObject;
  appState.videoElement = videoElement;
  createControlButtons();
  // Fire-and-forget: processVideo handles its own errors internally.
  processVideo(event);
}
console.log('[UnsafeYT] Initializing script.');
// Rebuild the controls after bfcache restores and YouTube SPA navigations.
window.addEventListener('pageshow', createControlButtons);
window.addEventListener('yt-page-data-updated', createControlButtons);
// The @require'd YouTube Helper API fires this with player/video details.
window.youtubeHelperApi.eventTarget.addEventListener('yt-helper-api-ready', _handleApiUpdate);
})();