Video: Using the Shadertoy audio channel - passing audio data into a shader with three.js
For an image channel, it is enough to pass in the texture loaded by THREE.TextureLoader.
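For comparison, here is a minimal sketch of the image case. The texture path, the iChannel0 uniform name, and the pass-through shaders are only illustrative:

import * as THREE from "three";

// Image channel: a plain texture bound to a sampler2D uniform is enough.
const imageMaterial = new THREE.ShaderMaterial({
  uniforms: {
    iChannel0: { value: new THREE.TextureLoader().load("/textures/te1.jpg") },
  },
  vertexShader: `
    varying vec2 vUv;
    void main() {
      vUv = uv;
      gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);
    }`,
  fragmentShader: `
    uniform sampler2D iChannel0;
    varying vec2 vUv;
    void main() {
      gl_FragColor = texture2D(iChannel0, vUv);
    }`,
});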
For an audio channel, the audio data first has to be analysed and written into a DataTexture, which is then passed into the shader.

Analysing the audio:
import * as THREE from "three";

// Wraps a THREE.AudioAnalyser and exposes its frequency data as a DataTexture
// that can be bound to a sampler2D uniform, like a Shadertoy audio channel.
class AudioTexture {
  tAudioData!: THREE.DataTexture;
  analyser?: THREE.AudioAnalyser;

  constructor(file: string, isWebGL2: boolean) {
    this.init(file, isWebGL2);
  }

  init(file: string, isWebGL2: boolean) {
    const fftSize = 128;
    const listener = new THREE.AudioListener();
    const audio = new THREE.Audio(listener);

    if (/(iPad|iPhone|iPod)/g.test(navigator.userAgent)) {
      // On iOS, decode the file with AudioLoader instead of using a media element.
      const loader = new THREE.AudioLoader();
      loader.load(file, function (buffer) {
        audio.setBuffer(buffer);
        audio.play();
      });
    } else {
      const mediaElement = new Audio(file);
      mediaElement.loop = true;
      mediaElement.play();
      audio.setMediaElementSource(mediaElement);
    }

    // fftSize / 2 frequency bins, one byte each.
    const analyser = new THREE.AudioAnalyser(audio, fftSize);
    this.analyser = analyser;

    // three.js's WebGL2 path expects RedFormat for single-channel data;
    // LuminanceFormat is the WebGL1 fallback.
    const format = isWebGL2 ? THREE.RedFormat : THREE.LuminanceFormat;
    this.tAudioData = new THREE.DataTexture(
      analyser.data,
      fftSize / 2,
      1,
      format
    );
  }

  // Call once per frame: refresh the frequency data and re-upload the texture.
  update() {
    if (this.analyser) {
      this.analyser.getFrequencyData();
      this.tAudioData.needsUpdate = true;
    }
  }
}
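If you only need a single value per frame rather than the full spectrum, the same analyser can drive a plain float uniform instead of a DataTexture. A minimal sketch, where the uAudioLevel uniform name and the per-frame hook are assumptions of this example:

// Alternative sketch: drive one float uniform from the analyser.
const levelTexture = new AudioTexture("/textures/sound1.mp3", true);
const levelUniforms = { uAudioLevel: { value: 0 } };

// Call this once per frame, before rendering.
function updateAudioLevel() {
  if (levelTexture.analyser) {
    // getAverageFrequency() returns the mean of the current bins (0..255).
    levelUniforms.uAudioLevel.value =
      levelTexture.analyser.getAverageFrequency() / 255;
  }
}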
Shader code that works with this texture:
const { width, height } = helper.renderer.getDrawingBufferSize(
  new THREE.Vector2()
);

const shader = new THREE.ShaderMaterial({
  vertexShader: `
    varying vec2 vUv;
    void main() {
      vUv = uv;
      vec4 viewPosition = modelViewMatrix * vec4(position, 1.0);
      gl_Position = projectionMatrix * viewPosition;
    }`,
  fragmentShader: `
    uniform sampler2D iChannel0;
    uniform sampler2D iChannel1;
    varying vec2 vUv;
    uniform vec3 iResolution; // viewport resolution (in pixels)
    uniform float iTime;

    // based on https://www.shadertoy.com/view/lsf3RH by
    // trisomie21 (THANKS!)
    // My apologies for the ugly code.
    float snoise(vec3 uv, float res) // by trisomie21
    {
      const vec3 s = vec3(1e0, 1e2, 1e4);
      uv *= res;
      vec3 uv0 = floor(mod(uv, res))*s;
      vec3 uv1 = floor(mod(uv+vec3(1.), res))*s;
      vec3 f = fract(uv); f = f*f*(3.0-2.0*f);
      vec4 v = vec4(uv0.x+uv0.y+uv0.z, uv1.x+uv0.y+uv0.z,
                    uv0.x+uv1.y+uv0.z, uv1.x+uv1.y+uv0.z);
      vec4 r = fract(sin(v*1e-3)*1e5);
      float r0 = mix(mix(r.x, r.y, f.x), mix(r.z, r.w, f.x), f.y);
      r = fract(sin((v + uv1.z - uv0.z)*1e-3)*1e5);
      float r1 = mix(mix(r.x, r.y, f.x), mix(r.z, r.w, f.x), f.y);
      return mix(r0, r1, f.z)*2.-1.;
    }

    float freqs[4];

    void main()
    {
      // texture2D is #defined to texture() by three.js on WebGL2,
      // so these lookups compile on both WebGL1 and WebGL2.
      // Sample four frequency bands from the audio DataTexture.
      freqs[0] = texture2D( iChannel1, vec2( 0.01, 0.25 ) ).x;
      freqs[1] = texture2D( iChannel1, vec2( 0.07, 0.25 ) ).x;
      freqs[2] = texture2D( iChannel1, vec2( 0.15, 0.25 ) ).x;
      freqs[3] = texture2D( iChannel1, vec2( 0.30, 0.25 ) ).x;

      float brightness = freqs[1] * 0.25 + freqs[2] * 0.25;
      float radius = 0.24 + brightness * 0.2;
      float invRadius = 1.0/radius;

      vec3 orange = vec3( 0.8, 0.65, 0.3 );
      vec3 orangeRed = vec3( 0.8, 0.35, 0.1 );
      float time = iTime * 0.1;
      float aspect = 1.;
      // vec2 uv = gl_FragCoord.xy / iResolution.xy;
      vec2 uv = vUv;
      vec2 p = -0.5 + uv;
      p.x *= aspect;

      float fade = pow( length( 2.0 * p ), 0.5 );
      float fVal1 = 1.0 - fade;
      float fVal2 = 1.0 - fade;

      float angle = atan( p.x, p.y )/6.2832;
      float dist = length(p);
      vec3 coord = vec3( angle, dist, time * 0.1 );

      float newTime1 = abs( snoise( coord + vec3( 0.0, -time * ( 0.35 + brightness * 0.001 ), time * 0.015 ), 15.0 ) );
      float newTime2 = abs( snoise( coord + vec3( 0.0, -time * ( 0.15 + brightness * 0.001 ), time * 0.015 ), 45.0 ) );
      for( int i=1; i<=7; i++ ){
        float power = pow( 2.0, float(i + 1) );
        fVal1 += ( 0.5 / power ) * snoise( coord + vec3( 0.0, -time, time * 0.2 ), ( power * ( 10.0 ) * ( newTime1 + 1.0 ) ) );
        fVal2 += ( 0.5 / power ) * snoise( coord + vec3( 0.0, -time, time * 0.2 ), ( power * ( 25.0 ) * ( newTime2 + 1.0 ) ) );
      }

      float corona = pow( fVal1 * max( 1.1 - fade, 0.0 ), 2.0 ) * 50.0;
      corona += pow( fVal2 * max( 1.1 - fade, 0.0 ), 2.0 ) * 50.0;
      corona *= 1.2 - newTime1;
      vec3 sphereNormal = vec3( 0.0, 0.0, 1.0 );
      vec3 dir = vec3( 0.0 );
      vec3 center = vec3( 0.5, 0.5, 1.0 );
      vec3 starSphere = vec3( 0.0 );

      vec2 sp = -1.0 + 2.0 * uv;
      sp.x *= aspect;
      sp *= ( 2.0 - brightness );
      float r = dot(sp,sp);
      float f = (1.0-sqrt(abs(1.0-r)))/(r) + brightness * 0.5;
      if( dist < radius ){
        corona *= pow( dist * invRadius, 24.0 );
        vec2 newUv;
        newUv.x = sp.x*f;
        newUv.y = sp.y*f;
        newUv += vec2( time, 0.0 );

        vec3 texSample = texture2D( iChannel0, newUv ).rgb;
        float uOff = ( texSample.g * brightness * 4.5 + time );
        vec2 starUV = newUv + vec2( uOff, 0.0 );
        starSphere = texture2D( iChannel0, starUV ).rgb;
      }

      float starGlow = min( max( 1.0 - dist * ( 1.0 - brightness ), 0.0 ), 1.0 );
      //gl_FragColor.rgb = vec3( r );
      gl_FragColor.rgb = vec3( f * ( 0.75 + brightness * 0.3 ) * orange ) + starSphere + corona * orange + starGlow * orangeRed;
      gl_FragColor.a = 1.0;
    }
  `,
  uniforms: {
    // z component only kept for Shadertoy parity with the vec3 declaration above.
    iResolution: { value: new THREE.Vector3(width, height, 1) },
    iTime: { value: 0 },
    iChannel0: {
      value: new THREE.TextureLoader().load("/textures/te1.jpg", (t) => {
        // The surface texture is scrolled over time, so it must repeat.
        t.wrapS = THREE.RepeatWrapping;
        t.wrapT = THREE.RepeatWrapping;
      }),
    },
    // Filled with the audio DataTexture once playback starts (see below).
    iChannel1: { value: null },
  },
});
const plane = new THREE.Mesh(new THREE.PlaneGeometry(1, 1), shader);

// Advance the shader clock every frame, even before any audio is playing.
plane.onBeforeRender = () => {
  shader.uniforms.iTime.value += 0.01;
};
// Audio playback requires a user gesture (browser autoplay policy),
// so the AudioTexture is only created on the first click.
document.addEventListener(
  "click",
  () => {
    const audioTexture = new AudioTexture(
      "/textures/sound1.mp3",
      helper.renderer.capabilities.isWebGL2
    );
    shader.uniforms.iChannel1.value = audioTexture.tAudioData;

    // From now on, also refresh the audio texture every frame.
    plane.onBeforeRender = () => {
      audioTexture.update();
      shader.uniforms.iTime.value += 0.01;
    };
  },
  { once: true }
);
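To actually see the effect, the plane still has to sit in a scene that is rendered every frame. A minimal wiring sketch, assuming helper.renderer is the same WebGLRenderer used above; the scene and camera names are just examples:

// Minimal wiring sketch: scene, camera and render loop for the plane above.
const scene = new THREE.Scene();
const camera = new THREE.PerspectiveCamera(45, width / height, 0.1, 10);
camera.position.z = 2;
scene.add(plane);

const renderer = helper.renderer; // reuse the renderer from the snippet above
renderer.setAnimationLoop(() => {
  // plane.onBeforeRender fires during render and updates iTime / the audio texture.
  renderer.render(scene, camera);
});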