How to use the OES_texture_float extension?
I do not understand which arguments I need to pass to texImage2D.
var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);

var rb = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, rb);
gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, size[0], size[1]);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, rb);

var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, size[0], size[1], 0, gl.RGBA, ???, ???);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
What do I need to write instead of "???"?
Assuming you have the extension enabled, it's:
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, size[0], size[1], 0, gl.RGBA, gl.FLOAT, null);
HOWEVER, there is a BIG CAVEAT. The OES_texture_float extension doesn't guarantee that you will be able to render to a floating-point texture; it only means you can create and read from one. The extension that actually allows rendering to float is WEBGL_color_buffer_float, BUT browsers do not bother to expose that extension even when they support it. So you have to do
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
    // can't render
}
to check whether you can actually render to float textures after you attach them to the FBO.
Source: spent hours figuring out why the thing wasn't working in IE even though it supports the OES_texture_float extension.
Also be aware that you cannot use gl.LINEAR filtering with floating-point textures unless you also enable the OES_texture_float_linear extension, which was not available on most mobile devices as of August 2015.
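Putting the pieces together, here is a minimal sketch of creating a float texture as a render target with the completeness check folded in. The size variable is carried over from the snippet above; everything else is plain WebGL 1:
var floatExt = gl.getExtension("OES_texture_float");
if (!floatExt) {
    // floating-point textures are not available at all
}

var texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
// NEAREST filtering: LINEAR on float textures needs OES_texture_float_linear
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, size[0], size[1], 0, gl.RGBA, gl.FLOAT, null);

var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
if (gl.checkFramebufferStatus(gl.FRAMEBUFFER) !== gl.FRAMEBUFFER_COMPLETE) {
    // float textures exist here, but rendering to them is unsupported
}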
Related
I am trying to display a depth texture on my screen, but I always get a blank screen. I am not sure what I am doing wrong. I have seen some posts about drawing the FBO to a "quad", but I am not clear on how to do so.
Here is my code (I am using the depth texture extension as shown here: http://blog.tojicode.com/2012/07/using-webgldepthtexture.html):
function initGLTextureFrameBuffer()
{
    var depthTextureExt = gl.getExtension("WEBKIT_WEBGL_depth_texture");
    if (!depthTextureExt) return;

    rttFramebuffer = gl.createFramebuffer();
    gl.bindFramebuffer(gl.FRAMEBUFFER, rttFramebuffer);
    rttFramebuffer.width = 512;
    rttFramebuffer.height = 512;

    colorTexture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, colorTexture);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, rttFramebuffer.width, rttFramebuffer.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);

    depthTexture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, depthTexture);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.DEPTH_COMPONENT, rttFramebuffer.width, rttFramebuffer.height, 0, gl.DEPTH_COMPONENT, gl.UNSIGNED_SHORT, null);

    gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, colorTexture, 0);
    gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.TEXTURE_2D, depthTexture, 0);

    gl.bindTexture(gl.TEXTURE_2D, null);
    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
}
Here is my rendering function:
function drawOverlayTriangles()
{
    gl.enableVertexAttribArray(shaderProgram.aVertexPosition);
    gl.enableVertexAttribArray(shaderProgram.aTextureCoord);
    gl.vertexAttrib1f(shaderProgram.aHasTexture, 1.0);
    gl.vertexAttrib1f(shaderProgram.aisdepth, 1.0);

    gl.bindBuffer(gl.ARRAY_BUFFER, imageTextureCoord);
    gl.vertexAttribPointer(shaderProgram.aTextureCoord, 2, gl.FLOAT, false, 0, 0);

    // Matrix upload
    gl.uniformMatrix4fv(shaderProgram.uMVMatrix, false, pMVMatrix);
    gl.uniformMatrix4fv(shaderProgram.uPMatrix, false, perspM);

    gl.bindFramebuffer(gl.FRAMEBUFFER, rttFramebuffer);
    gl.bindTexture(gl.TEXTURE_2D, depthTexture);
    gl.uniform1i(shaderProgram.dTexture, 0);

    gl.clearColor(0, 0, 0, 1);
    gl.clear(gl.DEPTH_BUFFER_BIT | gl.COLOR_BUFFER_BIT);

    for (var i = 0; i < overlay.numElements; i++) {
        // Upload overlay vertices
        gl.bindBuffer(gl.ARRAY_BUFFER, overlayVertices[i]);
        gl.vertexAttribPointer(shaderProgram.aVertexPosition, 3, gl.FLOAT, false, 0, 0);

        // Upload overlay colors
        gl.bindBuffer(gl.ARRAY_BUFFER, overlayTriangleColors[i]);
        gl.vertexAttribPointer(shaderProgram.aVertexColor, 4, gl.FLOAT, false, 0, 0);

        // Draw overlay
        gl.drawArrays(gl.TRIANGLES, 0, overlay.elementNumVertices[i]);
    }

    gl.bindTexture(gl.TEXTURE_2D, null);
    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
    gl.disableVertexAttribArray(shaderProgram.aVertexPosition);
    gl.disableVertexAttribArray(shaderProgram.aVertexColor);
    gl.vertexAttrib1f(shaderProgram.aisdepth, 0.0);
}
Here is my fragment shader code:
varying vec4 vColor;
varying float vHasTexture;
varying vec2 vTextureCoord;
varying float visdepth;
varying vec4 position_1;
varying float DEPTH;

uniform sampler2D uTexture;
uniform float uTextureAlpha;
uniform sampler2D dTexture;

void main(void) {
    if (vHasTexture < 0.5 && visdepth < 0.5)
        gl_FragColor = vColor;
    if (vHasTexture > 0.5)
    {
        vec4 textureColor = texture2D(uTexture, vec2(vTextureCoord.s, vTextureCoord.t));
        gl_FragColor = vec4(textureColor.rgb, textureColor.a * uTextureAlpha);
    }
    if (visdepth > 0.5)
    {
        float z = texture2D(dTexture, vec2(vTextureCoord.s, vTextureCoord.t)).r;
        float n = 1.0;
        float f = 30.0;
        float c = (2.0 * n) / (f + n - z * (f - n));
        gl_FragColor.rgb = vec3(c);
    }
}
I will be grateful for any help because I have been trying to do this for a week with no luck.
It looks like you are trying to read from and write to the depth texture in the same pass. You have:
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.TEXTURE_2D, depthTexture,0);
and
gl.bindFramebuffer(gl.FRAMEBUFFER, rttFramebuffer);
gl.bindTexture(gl.TEXTURE_2D, depthTexture);
This will give you undefined results. You need a two-pass algorithm: in the first pass you render to the framebuffer you created; in the second pass you render to the default framebuffer (gl.bindFramebuffer(gl.FRAMEBUFFER, null);) with the depth texture bound for reading.
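Sketched out with the names from the question's code, the two passes would look roughly like this; drawScene and drawFullScreenQuad are hypothetical helpers standing in for your actual draw calls:
// Pass 1: draw the scene into the FBO; the depth attachment is written, never sampled
gl.bindFramebuffer(gl.FRAMEBUFFER, rttFramebuffer);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
drawScene(); // hypothetical helper: issues the overlay draw calls

// Pass 2: draw to the default framebuffer, now sampling the depth texture
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
gl.activeTexture(gl.TEXTURE0);
gl.bindTexture(gl.TEXTURE_2D, depthTexture);
gl.uniform1i(shaderProgram.dTexture, 0);
drawFullScreenQuad(); // hypothetical helper: draws the quad that visualizes depth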
Alternatively, if you only care about displaying the depth values as color (rather than using depth as an input to another rendering pass) you can replace the line
float z = texture2D(dTexture, vec2(vTextureCoord.s, vTextureCoord.t)).r;
with
float z = gl_FragCoord.z;
That way you are not reading from the same depth texture you are currently writing to. The built-in variable gl_FragCoord holds the window coordinates of the current fragment: https://www.opengl.org/sdk/docs/man/html/gl_FragCoord.xhtml
In this case you would just render to the default framebuffer instead of rendering to the framebuffer you created.
I ran into a problem using GL_LUMINANCE to define an FBO. Here is the code I used:
generateRenderToTexture(GL_RGBA, GL_RGBA, GL_UNSIGNED_BYTE, _maskTexture, _imageWidth, _imageHeight, false);
The related code is as follows:
TextureBuffer _maskTexture;

class TextureBuffer {
public:
    GLuint texture;
    GLuint frameBuffer;
    GLenum internalformat;
    GLenum format;
    GLenum type;
    int w, h;

    TextureBuffer() : texture(0), frameBuffer(0) {}

    void release()
    {
        if (texture)
        {
            glDeleteTextures(1, &texture);
            texture = 0;
        }
        if (frameBuffer)
        {
            glDeleteFramebuffers(1, &frameBuffer);
            frameBuffer = 0;
        }
    }
};
void generateRenderToTexture(GLint internalformat, GLenum format, GLenum type,
                             TextureBuffer &tb, int w, int h, bool linearInterp)
{
    glGenTextures(1, &tb.texture);
    glBindTexture(GL_TEXTURE_2D, tb.texture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, linearInterp ? GL_LINEAR : GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, linearInterp ? GL_LINEAR : GL_NEAREST);
    glTexImage2D(GL_TEXTURE_2D, 0, internalformat, w, h, 0, format, type, NULL);

    glGenFramebuffers(1, &tb.frameBuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, tb.frameBuffer);
    glClear(_glClearBits);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tb.texture, 0);

    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if (status != GL_FRAMEBUFFER_COMPLETE)
        printf("Framebuffer status: %x", (int)status);

    tb.internalformat = internalformat;
    tb.format = format;
    tb.type = type;
    tb.w = w;
    tb.h = h;
}
The question is, when I use
generateRenderToTexture(GL_RGBA, GL_RGBA, GL_UNSIGNED_BYTE, _maskTexture, _imageWidth, _imageHeight, false);
the code works well. But if I use GL_LUMINANCE instead,
generateRenderToTexture(GL_LUMINANCE, GL_LUMINANCE, GL_UNSIGNED_BYTE, _maskTexture, _imageWidthOriginal,
I don't know why I cannot use GL_LUMINANCE to define the FBO. Does anyone have useful suggestions to solve this?
The only formats that are guaranteed to work as color FBO attachments in ES 2.0 are, according to table 4.5 in the spec document:
GL_RGBA4
GL_RGB5_A1
GL_RGB565
Support for rendering to GL_RGBA, which works for you, is not required by the standard. Many implementations support it, though. The OES_rgb8_rgba8 extension adds support for GL_RGB8 and GL_RGBA8 formats as render targets.
GL_LUMINANCE is not supported as a color-renderable format by the standard, and I can't find an extension for it either. It's possible that some implementations could support it, but you certainly can't count on it.
ES 3.0 lists GL_R8 as a color-renderable format. In ES 3.0, the RED/R formats replace the LUMINANCE/ALPHA formats from ES 2.0. So if you can move to ES 3.0, you have support to render to 1-component texture formats.
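For illustration, here is a minimal sketch of such a one-component render target in WebGL 2, which exposes the ES 3.0 feature set. This assumes gl is a WebGL2RenderingContext; gl.R8 with gl.RED/gl.UNSIGNED_BYTE is the one-channel combination:
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, tex);
// R8 is listed as color-renderable in ES 3.0, so this attachment should come out complete
gl.texImage2D(gl.TEXTURE_2D, 0, gl.R8, 256, 256, 0, gl.RED, gl.UNSIGNED_BYTE, null);

var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, tex, 0);
console.log(gl.checkFramebufferStatus(gl.FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE); // expect true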
You're using non-extension FBO functions, which were introduced only with OpenGL 3. So unlike the FBO extension functions (ending with ARB), those functions are available only with an OpenGL 3 context. In OpenGL 3, the GL_LUMINANCE and GL_ALPHA texture internal formats are deprecated and not available in the core profile. They have been replaced by the GL_RED texture format. You can use an appropriately written shader, or texture swizzle parameters, to make a GL_RED texture work just like GL_LUMINANCE (swizzle dst.rgb = texture.r) or GL_ALPHA (swizzle dst.a = texture.r).
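In WebGL terms (which most of this page uses), the shader route is a one-liner; a sketch with the GLSL shown as a fragment shader source string, where uTex and vUV are assumed names:
var fsSource =
    "uniform sampler2D uTex;\n" +
    "varying vec2 vUV;\n" +
    "void main() {\n" +
    "    float lum = texture2D(uTex, vUV).r;       // the single channel lives in .r\n" +
    "    gl_FragColor = vec4(lum, lum, lum, 1.0);  // replicate it, like GL_LUMINANCE\n" +
    "}\n";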
I solved it by using GL_RG_EXT or GL_RED_EXT instead.
I have an OpenGL 3.2 core context on OS X 10.7.5 and am trying to render to a 3D texture using a layered rendering approach. The geometry shader feature gl_Layer is supported, but I cannot bind a GL_TEXTURE_3D to my framebuffer attachment; it returns GL_FRAMEBUFFER_UNSUPPORTED.
This is the card and driver version in my MBP:
AMD Radeon HD 6770M 1024 MB - OpenGL 3.2 CORE (ATI-7.32.12)
This feature does not directly relate to a specific extension AFAIK.
Does anybody know how to figure out whether this is unsupported by the driver or hardware?
Thanks so much.
Below is the code to reproduce the problem. I use GLFW to set up the context:
// Initialize GLFW
if (!glfwInit())
    throw "Failed to initialize GLFW";

glfwOpenWindowHint(GLFW_OPENGL_VERSION_MAJOR, 3);
glfwOpenWindowHint(GLFW_OPENGL_VERSION_MINOR, 2);
glfwOpenWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwOpenWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);

// Open a window and create its OpenGL context
if (!glfwOpenWindow(720, 480, 8, 8, 8, 8, 24, 8, GLFW_WINDOW))
    throw "Failed to open GLFW window";

//
// ...
//

GLuint framebuffer, texture;
GLenum status;
glGenFramebuffers(1, &framebuffer);

// Set up the FBO with one texture attachment
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, framebuffer);
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_3D, texture);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_3D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage3D(GL_TEXTURE_3D, 0, GL_RGBA8, 256, 256, 256, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
glFramebufferTexture(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, texture, 0);

status = glCheckFramebufferStatus(GL_DRAW_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE)
    throw status;
//
// status is GL_FRAMEBUFFER_UNSUPPORTED here !!!
//

glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
glDeleteTextures(1, &texture);
glDeleteFramebuffers(1, &framebuffer);
exit(1);
Does anybody know how to figure out whether this is unsupported by the driver or hardware?
It just told you. That's what GL_FRAMEBUFFER_UNSUPPORTED means: it's the driver exercising its veto power over any framebuffer attachments it doesn't like, for any reason whatsoever.
There's not much you can do when this happens except try other things, perhaps rendering to a 2D array texture instead.
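For what it's worth, the array-texture route is easy to prototype in WebGL 2, which tracks ES 3.0; the desktop GL 3.2 equivalents are glTexImage3D with GL_TEXTURE_2D_ARRAY and glFramebufferTextureLayer. A sketch, assuming gl is a WebGL2RenderingContext:
var tex = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D_ARRAY, tex);
// allocate a 256x256 texture with 256 layers instead of a true 3D texture
gl.texImage3D(gl.TEXTURE_2D_ARRAY, 0, gl.RGBA8, 256, 256, 256, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);

var fb = gl.createFramebuffer();
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, fb);
// attach one layer at a time as the color target
gl.framebufferTextureLayer(gl.DRAW_FRAMEBUFFER, gl.COLOR_ATTACHMENT0, tex, 0, 0);
console.log(gl.checkFramebufferStatus(gl.DRAW_FRAMEBUFFER) === gl.FRAMEBUFFER_COMPLETE);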
Here's my render loop:
Bind a custom FBO
Bind a texture (previously associated with the FBO as COLOR_ATTACHMENT0)
Render using a custom fragment shader which chooses fragment colors on the basis of a fractal algorithm. (If a fragment is not used by the algorithm, it is assigned the color black)
Rebind the window-provided framebuffer and renderbuffer. (On iOS 5, this is the [view bindDrawable] method.)
Clear the screen to white.
Render the fbo texture into a frame that is substantially smaller than the window itself.
Expected result:
The fractal should appear in the smaller frame. The frame should have a black background. The rest of the screen should be white.
Current result:
The entire screen is taken up by the fractal, as if I were rendering to the window-provided fbo as well as to my custom fbo/texture.
I don't really know what I'm doing wrong, so I'd be grateful for any help.
EDIT:
Fragment Shader:
void main()
{
    highp vec2 pix = gl_FragCoord.xy;
    lowp vec4 color = vec4(0.0, 0.0, 0.0, 0.0);
    // determine if fragment is part of the fractal using the algorithm
    // if yes, change the value of the color vec4
    gl_FragColor = color;
}
FBO Initialization:
// First create and bind the frame buffer
glGenFramebuffers(1, &frameBuffer);
glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);

// Create a texture
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

// Attach the texture to the framebuffer
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texture, 0);

GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (status != GL_FRAMEBUFFER_COMPLETE) {
    printf("\n Incomplete Frame Buffer!");
}
Render Loop:
{
    glBindFramebuffer(GL_FRAMEBUFFER, frameBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, texture);
    glViewport(0, 0, width, height);
    // Call the fractal render: a plane of 4 vertices drawn with GL_TRIANGLE_STRIP,
    // which invokes the fragment shader above.

    [self.view bindDrawable];
    glClearColor(1.0, 1.0, 1.0, 1.0);

    matrix4x4 mvp = multiplyMatrices(projectionMatrix, modelView);
    glUseProgram(shaderId);

    glBindBuffer(GL_ARRAY_BUFFER, vertexBuffer);
    glVertexAttribPointer(positionLocation, 4, GL_FLOAT, GL_FALSE, 0, NULL);
    glBindBuffer(GL_ARRAY_BUFFER, texelBuffer);
    glVertexAttribPointer(texelLocation, 2, GL_FLOAT, GL_FALSE, 0, NULL);

    glUniformMatrix4fv(mvpLocation, 1, 0, mvp.val);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, textureId);
    glUniform1i(textureLocation, 0);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, tVertices);
}
I hope this helps. Please let me know if you'd like any more information on what I'm doing.
I also noticed something very strange happening.
If, after binding the FBO, I try
glClear(GL_COLOR_BUFFER_BIT);
glClearColor(1.0,0.0,0.0,0.5);
...it is the window-provided framebuffer on which the clear actually happens.
Thank you guys for helping.
For a post-processing shader, I need the color and depth buffers of my framebuffer. Accessing the color buffer works fine, but I have problems creating the depth buffer. I always get an INVALID_ENUM error when trying to use texImage2D for the depth texture:
WebGL error INVALID_ENUM in texImage2D(TEXTURE_2D, 0, DEPTH_COMPONENT16, 1536, 502, 0, DEPTH_COMPONENT, UNSIGNED_BYTE, null)
Using a renderbuffer instead of a texture works, but I want depth in a texture so I can pass it to a shader.
Framebuffer with depth texture code:
Framebuffer.prototype.initBufferStuffTexture = function(width, height){
    if (this.width == width && this.height == height) {
        return;
    }

    this.width = width;
    this.height = height;

    // remove existing buffers
    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
    if (this.texture != null) {
        gl.deleteTexture(this.texture.glid);
        this.texture = null;
    }
    if (this.renderbuffer != null) {
        gl.deleteRenderbuffer(this.renderbuffer);
        this.renderbuffer = null;
    }
    if (this.framebuffer != null) {
        gl.deleteFramebuffer(this.framebuffer);
        this.framebuffer = null;
    }

    // create new buffers
    this.framebuffer = gl.createFramebuffer();
    this.texture = new Texture();
    this.texture.glid = gl.createTexture();
    this.depth = new Texture();
    this.depth.glid = gl.createTexture();

    // framebuffer
    gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
    this.framebuffer.width = width;
    this.framebuffer.height = height;

    // colorbuffer
    gl.bindTexture(gl.TEXTURE_2D, this.texture.glid);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, this.framebuffer.width, this.framebuffer.height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);

    // depthbuffer
    gl.bindTexture(gl.TEXTURE_2D, this.depth.glid);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.DEPTH_COMPONENT16, this.framebuffer.width, this.framebuffer.height, 0, gl.DEPTH_COMPONENT, gl.UNSIGNED_BYTE, null);

    // assemble buffers
    gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, this.texture.glid, 0);
    gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.TEXTURE_2D, this.depth.glid, 0);

    this.checkBuffer();

    gl.bindTexture(gl.TEXTURE_2D, null);
    gl.bindRenderbuffer(gl.RENDERBUFFER, null);
    gl.bindFramebuffer(gl.FRAMEBUFFER, null);
}
The OpenGL ES 2.0 specification (against which WebGL was specified) doesn't list GL_DEPTH_COMPONENT (or any of its sized versions) as a valid texture internal format, so it seems not to support depth textures. And since the WebGL specification doesn't state anywhere that it behaves differently, WebGL doesn't support them either.
But maybe this link is of help, where depth textures are emulated in WebGL by packing the depth value into a standard RGBA texture.
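The technique referred to is packing the normalized depth value into the four 8-bit channels of an ordinary RGBA texture. A common formulation of the pack/unpack pair, shown here as GLSL source strings (a sketch, not taken from the linked page):
var packGLSL =
    "vec4 packDepth(float depth) {\n" +   // depth assumed in [0, 1)
    "    vec4 enc = vec4(1.0, 255.0, 65025.0, 16581375.0) * depth;\n" +
    "    enc = fract(enc);\n" +
    "    enc -= enc.yzww * vec4(1.0 / 255.0, 1.0 / 255.0, 1.0 / 255.0, 0.0);\n" +
    "    return enc;\n" +
    "}\n";

var unpackGLSL =
    "float unpackDepth(vec4 rgba) {\n" +
    "    return dot(rgba, vec4(1.0, 1.0 / 255.0, 1.0 / 65025.0, 1.0 / 16581375.0));\n" +
    "}\n";
You would write depth to a color attachment with packDepth in the first pass, then reconstruct it with unpackDepth in the post-processing shader.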