So I'm working on computation using GPU shaders in WebGL- In the javascript portion of the code, I put precalculated values onto an array to send as a texture to be read up in the fragment shader-
var count=0;
var table = [];
for (var y = 0.1; y<1; y+=.01) {
for (var x = 0.1; x<1; x+=.01) {
var u = x;
var u2 = y;
u += u2 * 0.003921568627451;
u *= 4.0;
var sig = 1/(Math.exp(2.0 *(u-3)) + 1);
table.push(Math.floor(sig*255));
//table.push(255);
table.push(0);
table.push(0);
table.push(0);
count+=4;
console.log(Math.floor(sig*255)+"");
console.log(0+"");
console.log(0+"");
console.log(0+"");
}
}
//alert(count);
//alert(table[101]);
texturetab = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texturetab);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, 100, 100, 0,
gl.RGBA, gl.UNSIGNED_BYTE, new Uint8Array(table));
//
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.uniform1i(gl.getUniformLocation(prog, "uTabSamp"), 2);
but when I "catch" the values of the texture on the shader, the computation acts as if it is only reading values of 0- it's not changing as it should- (Snippet of where the value is caught on the fragment shader) :
sigmoid = (texture2D(uTabSamp, vec2(floor(u*100.+.5), floor(u2*100.+.5))).r)/255.;
float hfunc = sigmoid * u * u;
float ffunc = -u +(a - pow(v*nb,m))*hfunc ;
Seeing as how WebGL is difficult to read, how do I go about fixing this?