Thank you for your help, Memo. It's appreciated, and I'm much closer than I was.
Using your GLSL approach I can apply my texture image to a quad and then warp it much more cleanly. However, the geometry of the warping is not working as expected. There's a lot of code here, though I've tried to cut it down to the minimum needed to show the problem I'm having.
I've been bashing my head against this for hours. I suspect that there may just be a simple problem that I'm missing as an OpenGL novice. Any suggestions would be very welcome.
Complete sketch is online at http://hogtownconsulting.com/image_hosting/shadowd_dev.zip
I'm using JohnG's GLSL class (first response in the thread at http://processing.org/discourse/yabb_beta/YaBB.cgi?board=OpenGL;action=display;num=1159494801;) with one method of my own:
Code:
// thin wrapper around glUniform2f so the sketch can pass vec2 uniforms
void setUniformValue2f(int uniformLocation, float v0, float v1) {
  gl.glUniform2f(uniformLocation, v0, v1);
}
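For reference, it gets called like this (just a usage snippet mirroring draw() further down; it relies on the class's existing getUniformLocation(String) from the linked thread):
Code:
// look up the uniform by name, then pass two floats as a vec2
int loc = glsl.getUniformLocation("BL");
glsl.setUniformValue2f(loc, 0.0, 0.0);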
My quadwarp.vert file contains this:
Code:
uniform vec2 BL, BR, TL, TR;
uniform vec2 renderSize;

void main() {
  // transform from Processing coords to 0...1
  vec2 p = vec2(gl_Vertex.x / renderSize.x, gl_Vertex.y / renderSize.y);
  // point along the bottom edge (BL -> BR) at p.x
  vec2 x1 = mix(BL, BR, p.x);
  // point along the top edge (TL -> TR) at p.x
  vec2 x2 = mix(TL, TR, p.x);
  // blend between the bottom and top edge points at p.y
  p = mix(x1, x2, p.y);
  // transform from 0...1 back to Processing screen coords
  p = vec2(p.x * renderSize.x, p.y * renderSize.y);

  gl_Position = gl_ModelViewProjectionMatrix * vec4(p, 0.0, 1.0);
  gl_FrontColor = gl_Color;
  gl_TexCoord[0] = gl_TextureMatrix[0] * gl_MultiTexCoord0;
}
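To make it clearer what I'm expecting the shader to do, here's the same interpolation written out on the CPU using the corner values I pass in draw() below (just a throwaway sketch; warpPoint isn't part of the GLSL class, and Processing's lerp() stands in for GLSL's mix()):
Code:
// CPU version of the shader's warp, using BL=(0,0), BR=(1,0), TL=(0.25,1), TR=(1,1)
float[] warpPoint(float x, float y, float w, float h) {
  float px = x / w;                       // normalise to 0..1, like gl_Vertex / renderSize
  float py = y / h;
  float bx = lerp(0.0, 1.0, px);          // point along the bottom edge (BL -> BR)
  float by = lerp(0.0, 0.0, px);
  float tx = lerp(0.25, 1.0, px);         // point along the top edge (TL -> TR)
  float ty = lerp(1.0, 1.0, px);
  float rx = lerp(bx, tx, py);            // blend bottom/top at py
  float ry = lerp(by, ty, py);
  return new float[] { rx * w, ry * h };  // back to screen coords
}
With renderSize = (400, 600), warpPoint(0, 600, 400, 600) gives (100, 600), i.e. the top-left corner pulled in by a quarter of the width; that's the warp I'm after, but it isn't what I see on screen.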
And the sketch itself looks like this:
Code:
import javax.media.opengl.GL;
import javax.media.opengl.glu.GLU;
import com.sun.opengl.util.BufferUtil;
import java.nio.*;
import processing.opengl.PGraphicsOpenGL;
import processing.core.*;

GLSL glsl;
boolean shaderOn;
PGraphicsOpenGL pgl;
GL gl;
GLU glu = new GLU();
int texture;
PImage img;

void setup() {
  size(800, 600, OPENGL);

  glsl = new GLSL();
  glsl.loadVertexShader("data/quadwarp.vert");
  glsl.useShaders();

  img = loadImage("data/grid_256x256.jpg");

  pgl = (PGraphicsOpenGL) g;
  gl = pgl.gl;
  gl.glShadeModel(GL.GL_SMOOTH);            // Enable Smooth Shading
  gl.glClearColor(0.0f, 0.0f, 0.0f, 0.5f);  // Black Background
  gl.glClearDepth(1.0f);                    // Depth Buffer Setup
  gl.glEnable(GL.GL_DEPTH_TEST);            // Enables Depth Testing
  gl.glDepthFunc(GL.GL_LEQUAL);             // The Type Of Depth Testing To Do
  gl.glHint(GL.GL_PERSPECTIVE_CORRECTION_HINT, GL.GL_NICEST); // Really Nice Perspective Calculations

  gl.glEnable(GL.GL_TEXTURE_2D);
  texture = genTexture(gl);
  gl.glBindTexture(GL.GL_TEXTURE_2D, texture);
  gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MIN_FILTER, GL.GL_LINEAR);
  gl.glTexParameteri(GL.GL_TEXTURE_2D, GL.GL_TEXTURE_MAG_FILTER, GL.GL_LINEAR);
}
void draw() {
  pgl.beginGL();
  glsl.startShader();

  // pass the four warp corners (in 0..1 space) and the render size to the shader
  glsl.setUniformValue2f(glsl.getUniformLocation("BL"), 0.0, 0.0);
  glsl.setUniformValue2f(glsl.getUniformLocation("BR"), 1.0, 0.0);
  glsl.setUniformValue2f(glsl.getUniformLocation("TL"), 0.25, 1.0);
  glsl.setUniformValue2f(glsl.getUniformLocation("TR"), 1.0, 1.0);
  glsl.setUniformValue2f(glsl.getUniformLocation("renderSize"), (float)width/2, (float)height);

  // upload the image into the currently bound texture
  gl.glTexImage2D(GL.GL_TEXTURE_2D, 0, GL.GL_RGB, img.width, img.height, 0, GL.GL_RGB, GL.GL_UNSIGNED_BYTE, getTextureByteBuffer(false));

  gl.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT);
  gl.glLoadIdentity();                // Reset The View
  gl.glScalef(100, 100, 100);
  gl.glTranslatef(0.0, 0.0, -5.0);

  // draw a unit quad textured with the grid image
  gl.glBindTexture(GL.GL_TEXTURE_2D, texture);
  gl.glBegin(GL.GL_QUADS);
  gl.glTexCoord2f(0, 1);
  gl.glVertex3f(0.0f, 1.0f, 1.0f);    // Top Left Of The Texture and Quad
  gl.glTexCoord2f(0, 0);
  gl.glVertex3f(0.0f, 0.0f, 1.0f);    // Bottom Left Of The Texture and Quad
  gl.glTexCoord2f(1, 0);
  gl.glVertex3f(1.0f, 0.0f, 1.0f);    // Bottom Right Of The Texture and Quad
  gl.glTexCoord2f(1, 1);
  gl.glVertex3f(1.0f, 1.0f, 1.0f);    // Top Right Of The Texture and Quad
  gl.glEnd();

  glsl.endShader();
  pgl.endGL();
}
int genTexture(GL gl) {
  final int[] tmp = new int[1];
  gl.glGenTextures(1, tmp, 0);
  return tmp[0];
}
// unpack the PImage's ARGB pixels into a tightly packed RGB(A) byte buffer,
// walking the rows bottom-up so the texture isn't upside down in OpenGL
ByteBuffer getTextureByteBuffer(boolean useAlphaChannel) {
  int bytesPerPixel = useAlphaChannel ? 4 : 3;
  ByteBuffer unpackedPixels = BufferUtil.newByteBuffer(img.pixels.length * bytesPerPixel);
  for (int row = img.height - 1; row >= 0; row--) {
    for (int col = 0; col < img.width; col++) {
      int packedPixel = img.pixels[row * img.width + col];
      unpackedPixels.put((byte) ((packedPixel >> 16) & 0xFF)); // red
      unpackedPixels.put((byte) ((packedPixel >> 8) & 0xFF));  // green
      unpackedPixels.put((byte) ((packedPixel >> 0) & 0xFF));  // blue
      if (useAlphaChannel) {
        unpackedPixels.put((byte) ((packedPixel >> 24) & 0xFF)); // alpha
      }
    }
  }
  unpackedPixels.flip();
  return unpackedPixels;
}