Posted by admin on 2024-10-02 21:56:17

A must-have Tampermonkey script for students: automatic geolocation and a virtual camera to handle online classes with ease!

```javascript
// ==UserScript==
// @name         Enhanced Virtual Camera Hook
// @namespace    updownu
// @version      1.0
// @description  Hook various camera APIs
// @match        *://*/*
// @grant        unsafeWindow
// @grant        GM_xmlhttpRequest
// @run-at       document-start
// ==/UserScript==
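// Overview: StealthUI adds a hidden Shadow-DOM panel for loading a replacement
// image, VirtualStream turns that image into a subtly jittered live MediaStream,
// and APIHook swaps the camera/WebRTC/geolocation APIs to serve it.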
(function() {
'use strict';
let base64Image;
// ---- Utility Class ----
class Utils {
static generateRandomBase64(length) {
    const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
    let result = '';
    for (let i = 0; i < length; i++) {
      result += characters.charAt(Math.floor(Math.random() * characters.length));
    }
    return result;
}
static getBase64Image(defaultImage) {
    // Prefer the image the user loaded through the panel; otherwise fall back
    // to the default (reconstructed helper; its original body was lost from the post).
    return Promise.resolve(base64Image || defaultImage);
}
}
// ---- Stealth UI Class ----
class StealthUI {
constructor() {
    // Default frame: a plain dark 1280x720 canvas rendered to a data URL
    // (assumption; the base64 image embedded in the original post did not survive).
    const c = document.createElement('canvas');
    c.width = 1280;
    c.height = 720;
    const cctx = c.getContext('2d');
    cctx.fillStyle = '#222';
    cctx.fillRect(0, 0, c.width, c.height);
    this.defaultBase64Image = c.toDataURL('image/png');
}
createUI() {
    // Reconstructed panel markup and styles; the originals were garbled in the post.
    const styles = `
      #stealth-container { position: fixed; right: 10px; bottom: 10px; z-index: 2147483647; }
      #stealth-loadButton.done { opacity: 0.4; }
    `;
    const container = document.createElement('div');
    document.documentElement.appendChild(container);
    const template = document.createElement('template');
    template.innerHTML = `
      <style>${styles}</style>
      <div id="stealth-container">
        <input type="file" id="stealth-fileInput" accept="image/*" style="display:none">
        <button id="stealth-loadButton">Load</button>
      </div>
    `;
    container.appendChild(template.content.cloneNode(true));
    const shadow = container.querySelector('#stealth-container').attachShadow({ mode: 'closed' });
    shadow.appendChild(template.content.cloneNode(true));
    const stealthContainer = shadow.querySelector('#stealth-container');
    const fileInput = shadow.querySelector('#stealth-fileInput');
    const loadButton = shadow.querySelector('#stealth-loadButton');
    loadButton.onclick = () => {
      fileInput.click();
    };
    fileInput.onchange = () => {
      const reader = new FileReader();
      reader.onload = (e) => {
      base64Image = e.target.result;
      loadButton.classList.add('done');
      loadButton.textContent = '...';
      };
      reader.readAsDataURL(fileInput.files[0]);
    };
}
}
// ---- Virtual Stream Class ----
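// Renders a base image to a canvas and re-captures it as a live MediaStream:
// every frame gets slight hue/saturation/brightness jitter plus a noise overlay,
// so the feed does not look like a static picture.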
class VirtualStream {
constructor(width = 1280, height = 720, fps = 30) {
    this.width = width;
    this.height = height;
    this.fps = fps;
}
async createNoiseLayer(opacity = 0.26149721) {
    const canvas = document.createElement('canvas');
    canvas.width = this.width;
    canvas.height = this.height;
    const ctx = canvas.getContext('2d');
    const imageData = ctx.createImageData(this.width, this.height);
    const data = imageData.data;
    for (let i = 0; i < data.length; i += 4) {
      const v = Math.floor(Math.random() * 256); // grayscale noise (reconstructed fill)
      data[i] = v;
      data[i + 1] = v;
      data[i + 2] = v;
      data[i + 3] = Math.floor(opacity * 255);
    }
    ctx.putImageData(imageData, 0, 0);
    return canvas;
}
// Draw an image so it covers the target area, cropping overflow (like CSS object-fit: cover).
drawImageCovered(ctx, img, x, y, w, h) {
    const imgRatio = img.width / img.height;
    const canvasRatio = w / h;
    let sx, sy, sWidth, sHeight;
    if (imgRatio > canvasRatio) {
      sHeight = img.height;
      sWidth = sHeight * canvasRatio;
      sy = 0;
      sx = (img.width - sWidth) / 2;
    } else {
      sWidth = img.width;
      sHeight = sWidth / canvasRatio;
      sx = 0;
      sy = (img.height - sHeight) / 2;
    }
    ctx.drawImage(img, sx, sy, sWidth, sHeight, x, y, w, h);
}
async createDynamicFilteredStream() {
    const canvas = document.createElement('canvas');
    canvas.width = this.width;
    canvas.height = this.height;
    const ctx = canvas.getContext('2d');
    const img = new Image();
    img.src = await Utils.getBase64Image(new StealthUI().defaultBase64Image);
    const offscreenCanvas = document.createElement('canvas');
    offscreenCanvas.width = this.width;
    offscreenCanvas.height = this.height;
    const offscreenCtx = offscreenCanvas.getContext('2d');
    img.onload = () => {
      this.drawImageCovered(offscreenCtx, img, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
    };
    let noiseLayer = await this.createNoiseLayer();
    let hue = 0;
    let saturation = 100;
    let brightness = 100;
    function generateFrame() {
      hue = (hue + 0.1) % 2;
      saturation = 99 + Math.sin(Date.now() / 1000) * Math.random();
      brightness = 98 + Math.random() * 2;
      ctx.clearRect(0, 0, canvas.width, canvas.height);
      ctx.drawImage(offscreenCanvas, 0, 0);
      ctx.filter = `hue-rotate(${hue}deg) saturate(${saturation}%) brightness(${brightness}%)`;
      ctx.drawImage(canvas, 0, 0);
      ctx.filter = 'none';
      ctx.globalCompositeOperation = 'overlay';
      ctx.drawImage(noiseLayer, 0, 0);
      ctx.globalCompositeOperation = 'source-over';
    }
    const stream = canvas.captureStream(this.fps);
    setInterval(generateFrame, 1000 / this.fps);
    setInterval(() => {
      // createNoiseLayer() is async; swap the layer in once it resolves.
      this.createNoiseLayer().then((layer) => { noiseLayer = layer; });
    }, 100 + Math.sin(Date.now() / 1000) * 150);
    return stream;
}
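// WebGL variant of the stream above: the same subtle HSV jitter and noise are
// applied in a fragment shader instead of 2D-canvas filters, moving the
// per-frame work to the GPU; falls back to the 2D path when WebGL is missing.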
async createWebGLFilteredStream() {
    const canvas = document.createElement('canvas');
    canvas.width = this.width;
    canvas.height = this.height;
    const gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl');
    if (!gl) {
      console.error('WebGL not supported, falling back to 2D canvas');
      return this.createDynamicFilteredStream();
    }
    const vertexShaderSource = `
      attribute vec2 a_position;
      attribute vec2 a_texCoord;
      varying vec2 v_texCoord;
      void main() {
      gl_Position = vec4(a_position, 0, 1);
      v_texCoord = a_texCoord;
      }
    `;
    const fragmentShaderSource = `
      precision mediump float;
      uniform sampler2D u_image;
      uniform sampler2D u_noise;
      uniform float u_time;
      uniform vec2 u_noiseOffset;
      varying vec2 v_texCoord;
      vec3 rgb2hsv(vec3 c) {
      vec4 K = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
      vec4 p = mix(vec4(c.bg, K.wz), vec4(c.gb, K.xy), step(c.b, c.g));
      vec4 q = mix(vec4(p.xyw, c.r), vec4(c.r, p.yzx), step(p.w, c.r));
      float d = q.x - min(q.w, q.y);
      float e = 1.0e-10;
      return vec3(abs(q.z - q.y) / (6.0 * d + e), d / (q.x + e), q.x);
      }
      vec3 hsv2rgb(vec3 c) {
      vec4 K = vec4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
      vec3 p = abs(fract(c.xxx + K.xyz) * 6.0 - K.www);
      return c.z * mix(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
      }
      void main() {
      vec4 color = texture2D(u_image, v_texCoord);
      vec4 noise = texture2D(u_noise, v_texCoord * 4.0 + u_noiseOffset);
      vec3 hsv = rgb2hsv(color.rgb);
      float saturationAdjustment = 0.99 + sin(u_time * 0.002) * 0.01;
      hsv.y *= saturationAdjustment;
      float brightnessAdjustment = 0.98 + 0.02 * fract(sin(dot(v_texCoord, vec2(12.9898, 78.233)) * 43758.5453));
      hsv.z *= brightnessAdjustment;
      vec3 rgb = hsv2rgb(hsv);
      gl_FragColor = vec4(rgb + noise.rgb * 0.1, color.a);
      }
    `;
    function createShader(gl, type, source) {
      const shader = gl.createShader(type);
      gl.shaderSource(shader, source);
      gl.compileShader(shader);
      if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
      console.error('An error occurred compiling the shaders: ', gl.getShaderInfoLog(shader));
      gl.deleteShader(shader);
      return null;
      }
      return shader;
    }
    const vertexShader = createShader(gl, gl.VERTEX_SHADER, vertexShaderSource);
    const fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragmentShaderSource);
    const program = gl.createProgram();
    gl.attachShader(program, vertexShader);
    gl.attachShader(program, fragmentShader);
    gl.linkProgram(program);
    if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
      console.error('Unable to initialize the shader program: ', gl.getProgramInfoLog(program));
      return null;
    }
    const positionBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
      -1, -1,
       1, -1,
      -1,  1,
       1,  1,
    ]), gl.STATIC_DRAW);
    const texCoordBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
      0, 0,
      1, 0,
      0, 1,
      1, 1,
    ]), gl.STATIC_DRAW);
    const positionAttributeLocation = gl.getAttribLocation(program, 'a_position');
    const texCoordAttributeLocation = gl.getAttribLocation(program, 'a_texCoord');
    const u_image = gl.getUniformLocation(program, 'u_image');
    const u_noise = gl.getUniformLocation(program, 'u_noise');
    const u_time = gl.getUniformLocation(program, 'u_time');
    const u_noiseOffset = gl.getUniformLocation(program, 'u_noiseOffset');
    gl.useProgram(program);
    gl.enableVertexAttribArray(positionAttributeLocation);
    gl.enableVertexAttribArray(texCoordAttributeLocation);
    const imgTexture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, imgTexture);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
    const img = new Image();
    img.src = await Utils.getBase64Image(new StealthUI().defaultBase64Image);
    img.onload = () => {
      gl.bindTexture(gl.TEXTURE_2D, imgTexture);
      gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, img);
    };
    function createNoiseTexture(gl, size) {
      const pixels = new Uint8Array(size * size * 4);
      for (let i = 0; i < pixels.length; i++) {
        pixels[i] = Math.floor(Math.random() * 256);
      }
      const texture = gl.createTexture();
      gl.bindTexture(gl.TEXTURE_2D, texture);
      gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, size, size, 0, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.REPEAT);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.REPEAT);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
      return texture;
    }
    const noiseTexture = createNoiseTexture(gl, 256);
    // Per-frame draw: bind the quad geometry, feed both textures plus the
    // time/offset uniforms, and draw the full-screen quad.
    const render = (time) => {
      gl.viewport(0, 0, canvas.width, canvas.height);
      gl.clear(gl.COLOR_BUFFER_BIT);
      gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
      gl.vertexAttribPointer(positionAttributeLocation, 2, gl.FLOAT, false, 0, 0);
      gl.bindBuffer(gl.ARRAY_BUFFER, texCoordBuffer);
      gl.vertexAttribPointer(texCoordAttributeLocation, 2, gl.FLOAT, false, 0, 0);
      gl.activeTexture(gl.TEXTURE0);
      gl.bindTexture(gl.TEXTURE_2D, imgTexture);
      gl.uniform1i(u_image, 0);
      gl.activeTexture(gl.TEXTURE1);
      gl.bindTexture(gl.TEXTURE_2D, noiseTexture);
      gl.uniform1i(u_noise, 1);
      gl.uniform1f(u_time, time);
      gl.uniform2f(u_noiseOffset, Math.random(), Math.random());
      gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    };
    const stream = canvas.captureStream(this.fps);
    let lastTime = 0;
    // Throttle rendering to the requested fps inside requestAnimationFrame.
    const update = (currentTime) => {
      if (currentTime - lastTime >= 1000 / this.fps) {
      render(currentTime);
      lastTime = currentTime;
      }
      requestAnimationFrame(update);
    }
    update(0);
    return stream;
}
async createVirtualStream() {
    return await this.createWebGLFilteredStream();
}
}
// ---- API Hook Class ----
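// Replaces camera- and location-related browser APIs with hooked versions and
// keeps a WeakMap from each hook back to the original it replaced.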
class APIHook {
constructor() {
    this.methodsLookupTable = new WeakMap();
    this.virtualDeviceId = Utils.generateRandomBase64(43) + '=';
    this.virtualGroupId = Utils.generateRandomBase64(43) + '=';
    this.hookMediaDevices();
    this.hookRTCPeerConnection();
    this.hookMediaStreamTrack();
    this.hookImageCapture();
    this.hookGeolocation();
    this.Queueing();
}
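// Anti-detection: proxy Function.prototype.toString so a hooked function
// stringifies exactly like the native original it replaced.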
Queueing() {
    const originalToStr = Function.prototype.toString;
    const map = this.methodsLookupTable;
    Function.prototype.toString = new Proxy(originalToStr, {
      apply(target, thisArg, argumentsList) {
      if (map.has(thisArg)) {
          return map.get(thisArg).toString();
      }
      return Reflect.apply(target, thisArg, argumentsList);
      }
    });
}
// Helper function to replace a method with a hook
replaceMethod(obj, methodName, newMethod) {
    const oldMethod = obj[methodName];
    obj[methodName] = newMethod;
    this.methodsLookupTable.set(newMethod, oldMethod);
}
// Hook navigator.mediaDevices methods
hookMediaDevices() {
    this.replaceMethod(navigator.mediaDevices, 'enumerateDevices', async () => {
      const devices = await this.methodsLookupTable.get(navigator.mediaDevices.enumerateDevices).call(navigator.mediaDevices);
      // MediaDeviceInfo has no public constructor, so mimic it with a plain object.
      const virtualDevice = {
        deviceId: this.virtualDeviceId,
        groupId: this.virtualGroupId,
        kind: 'videoinput',
        label: 'Integrated Webcam',
        toJSON() { return this; }
      };
      return [...devices, virtualDevice];
    });
    this.replaceMethod(navigator.mediaDevices, 'getUserMedia', async (constraints) => {
      if (constraints && constraints.video) {
      return await new VirtualStream().createVirtualStream();
      }
      return this.methodsLookupTable.get(navigator.mediaDevices.getUserMedia).call(navigator.mediaDevices, constraints);
    });
    this.replaceMethod(navigator.mediaDevices, 'getDisplayMedia', async (constraints) => {
      return await new VirtualStream().createVirtualStream();
    });
}
// Hook RTCPeerConnection
hookRTCPeerConnection() {
    const originalRTCPeerConnection = window.RTCPeerConnection;
    // `this` inside the class below is the peer connection, so capture the id here.
    const virtualDeviceId = this.virtualDeviceId;
    this.replaceMethod(window, 'RTCPeerConnection', class extends originalRTCPeerConnection {
      addTrack(track, ...streams) {
          const sender = originalRTCPeerConnection.prototype.addTrack.call(this, track, ...streams);
          if (track.kind === 'video' && track.id === virtualDeviceId) {
            // addTrack() is synchronous while the virtual stream is built
            // asynchronously, so the fake track is swapped in afterwards
            // via RTCRtpSender.replaceTrack().
            new VirtualStream().createVirtualStream().then((stream) => {
              const [videoTrack] = stream.getVideoTracks();
              sender.replaceTrack(videoTrack);
            });
          }
          return sender;
      }
    });
}
// Hook MediaStreamTrack
hookMediaStreamTrack() {
    const originalMediaStreamTrack = window.MediaStreamTrack;
    // Capture the ids: `this` inside the class below refers to the track instance.
    const virtualDeviceId = this.virtualDeviceId;
    const virtualGroupId = this.virtualGroupId;
    this.replaceMethod(window, 'MediaStreamTrack', class extends originalMediaStreamTrack {
      getSettings() {
          const settings = originalMediaStreamTrack.prototype.getSettings.call(this);
          if (settings.deviceId === virtualDeviceId) {
            return {
              ...settings,
              deviceId: virtualDeviceId,
              groupId: virtualGroupId,
              width: 1280,
              height: 720,
              aspectRatio: 16 / 9,
              frameRate: 30,
              facingMode: 'user',
              resizeMode: 'none'
            };
          }
          return settings;
      }
      applyConstraints(newConstraints) {
          // Pretend the constraints were applied without touching the source.
          this.constraints = Object.assign(this.constraints || {}, newConstraints);
          return Promise.resolve();
      }
      stop() {
          this.enabled = false;
          originalMediaStreamTrack.prototype.stop.call(this);
      }
    });
}
// Hook ImageCapture
hookImageCapture() {
    if (!window.ImageCapture) return; // not available in all browsers
    const originalImageCapture = window.ImageCapture;
    const virtualDeviceId = this.virtualDeviceId;
    this.replaceMethod(window, 'ImageCapture', class extends originalImageCapture {
      constructor(track) {
          // `this` cannot be read before super(), so the id is captured above.
          if (track.kind === 'video' && track.id === virtualDeviceId) {
            // VirtualStreamTrack is referenced by the original script, but its
            // definition did not survive in the post.
            super(new VirtualStreamTrack(track));
          } else {
            super(track);
          }
      }
    });
}
// Geolocation Spoofing
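// Resolves a street address to coordinates with OpenStreetMap's Nominatim API;
// GM_xmlhttpRequest runs outside the page, sidestepping CORS restrictions.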
async getCoordinatesFromAddress(address) {
    return new Promise((resolve, reject) => {
      GM_xmlhttpRequest({
      method: 'GET',
      url: `https://nominatim.openstreetmap.org/search?q=${encodeURIComponent(address)}&format=json`,
      onload: (response) => {
          if (response.status === 200) {
            const data = JSON.parse(response.responseText);
            if (data.length > 0) {
              // Nominatim returns lat/lon as strings in the first result.
              resolve({ latitude: parseFloat(data[0].lat), longitude: parseFloat(data[0].lon) });
            } else {
              reject(new Error('No geocoding result for this address'));
            }
          } else {
            reject(new Error('Geocoding request failed: ' + response.status));
          }
      },
      onerror: reject
      });
    });
}
// Hook navigator.geolocation (reconstructed sketch; the coordinates below are
// placeholders, and a complete version would feed in the result of
// getCoordinatesFromAddress() above).
hookGeolocation() {
    const fakePosition = {
      coords: {
        latitude: 39.9042,   // placeholder
        longitude: 116.4074, // placeholder
        accuracy: 20,
        altitude: null,
        altitudeAccuracy: null,
        heading: null,
        speed: null
      },
      timestamp: Date.now()
    };
    this.replaceMethod(navigator.geolocation, 'getCurrentPosition', (success) => success(fakePosition));
    this.replaceMethod(navigator.geolocation, 'watchPosition', (success) => {
      success(fakePosition);
      return 0;
    });
}
}
// ---- Bootstrap (assumed; lost with the truncated post) ----
new APIHook();
new StealthUI().createUI();
})();
```
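For a quick sanity check after installing the script, something like the following can be run in the page console (a sketch; the exact device id is randomly generated on each load):

```javascript
// The hooked enumerateDevices() should now include the injected fake webcam.
navigator.mediaDevices.enumerateDevices().then((devices) => {
  console.log(devices.filter((d) => d.kind === 'videoinput'));
  // expect an entry with label 'Integrated Webcam' and a random base64 deviceId
});

// The hooked getUserMedia() returns the canvas-backed virtual stream
// instead of a real camera capture.
navigator.mediaDevices.getUserMedia({ video: true }).then((stream) => {
  const [track] = stream.getVideoTracks();
  console.log(track.getSettings()); // expect 1280x720 at 30 fps from the canvas capture
});
```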