// Point-sprite vertex shader: constant sprite size, pass-through texture
// coordinates, and a depth remap that rescales z by w/5.
// (Distance-based sizing and alpha overrides were disabled by the author.)
void main()
{
    // Distance attenuation is off; every sprite gets a fixed size.
    gl_PointSize = 20.0;

    gl_TexCoord[0] = gl_MultiTexCoord0;

    // Fixed-function transform, then compress the depth value.
    gl_Position = ftransform();
    gl_Position.z = gl_Position.z * gl_Position.w / 5.0;
}
// Per-vertex lighting setup: forwards the eye-space light direction and
// the transformed normal to the fragment stage, plus the standard position.
void main()
{
    gl_Position = ftransform();
    normal = gl_NormalMatrix * gl_Normal;
    lightDir = normalize(vec3(gl_LightSource[0].position));
}
void Lavify::setup(float width, float height) { string vertexShader = "void main() {\ gl_TexCoord[0] = gl_MultiTexCoord0;\ gl_Position = ftransform();\ }";
void ofxBlurShader::setup(float w, float h) { fbo1.allocate(w,h); fbo2.allocate(w,h); string vertexShader = "void main() {\ gl_TexCoord[0] = gl_MultiTexCoord0;\ gl_Position = ftransform();\ }";
// Point-sprite vertex shader: scales the point size by eye-space depth and
// forwards texture coordinates plus both primary and secondary colors.
void main()
{
    float baseSize = 1000.0 * gl_Point.size;

    // Project with a shrunken w (0.75 instead of 1.0) into eye space.
    vec4 v = gl_Vertex;
    v.w = 0.75;
    vec3 eyePos = vec3(gl_ModelViewMatrix * v);

    // Attenuate with depth along -z, never dropping below one pixel.
    gl_PointSize = max(1.0, baseSize / (1.0 - eyePos.z));

    gl_TexCoord[0] = gl_MultiTexCoord0;
    gl_Position = ftransform();
    gl_FrontColor = gl_Color;
    gl_FrontSecondaryColor = gl_SecondaryColor;
}
// Shadow-mapped lighting vertex shader: emits shadow-map coordinates,
// the eye-space normal, a vertex-to-light vector, and texture coordinates.
void main()
{
    // Texture unit 7's matrix holds the shadow-map projection.
    ShadowCoord = gl_TextureMatrix[7] * gl_Vertex;

    vec4 eyeVertex = gl_ModelViewMatrix * gl_Vertex;
    vertex_to_light_vector = vec3(gl_LightSource[0].position - eyeVertex);

    normal = gl_NormalMatrix * gl_Normal;
    texture_coordinate = vec2(gl_MultiTexCoord0);
    gl_FrontColor = gl_Color;
    gl_Position = ftransform();
}
// Phong setup: everything the fragment stage needs for shading is
// expressed in eye (model-view) space and interpolated across the primitive.
void main()
{
    // gl_NormalMatrix is the correct transform for normals into eye space.
    normal = normalize(gl_NormalMatrix * gl_Normal);

    gl_TexCoord[0] = gl_MultiTexCoord0;

    // Eye-space position of this vertex; interpolated per fragment.
    curPosition = (gl_ModelViewMatrix * gl_Vertex).xyz;

    // Standard fixed-function position output.
    gl_Position = ftransform();
}
void ofxBlurShader::setup(float w, float h) { fbo1.allocate(w,h); fbo2.allocate(w,h); fbo1.begin(); ofClear(0, 0, 0); fbo1.end(); fbo2.begin(); ofClear(0, 0, 0); fbo2.end(); string vertexShader = "#version 120\n \ varying vec2 texCoordVarying;\ void main(void)\ {\ texCoordVarying = gl_MultiTexCoord0.xy;\ gl_Position = ftransform();\ }";
bool TexturePainter::init( int texSize ) { // Init the off-screen rendering buffer. m_TexImg = glw::createTexture2D( m_Context, GL_RGB, texSize, texSize, GL_RGB, GL_UNSIGNED_BYTE ); glw::BoundTexture2DHandle boundTex = m_Context.bindTexture2D( m_TexImg, 0 ); boundTex->setSampleMode( glw::TextureSampleMode(GL_NEAREST,GL_NEAREST,GL_CLAMP,GL_CLAMP,GL_CLAMP) ); m_Context.unbindTexture2D( 0 ); m_TexFB = glw::createFramebuffer( m_Context, glw::RenderTarget(), glw::texture2DTarget(m_TexImg) ); // Init shaders used for color correction. const std::string initVertSrc = GLW_STRINGIFY ( void main() { gl_Position = ftransform(); gl_TexCoord[0] = gl_Vertex; gl_TexCoord[1] = gl_MultiTexCoord0; }
// Extracts the full transform state (translation, rotation, scale, pivots,
// pivot translations, rotation order) of the Maya DAG node at `path` and
// writes it into `dst`. Returns false when the node is not a transform.
bool HesperisIO::GetTransform(BaseTransform * dst, const MDagPath & path)
{
    MStatus status;
    MFnTransform fnXform(path, &status);
    if(!status) {
        MGlobal::displayInfo(MString("is not transform ")+path.fullPathName());
        return false;
    }

    // Read everything in transform space, matching how it is stored.
    MVector translation = fnXform.getTranslation(MSpace::kTransform);
    MPoint scalePivot = fnXform.scalePivot(MSpace::kTransform);
    MPoint rotatePivot = fnXform.rotatePivot(MSpace::kTransform);
    MVector rotatePivotTranslate = fnXform.rotatePivotTranslation(MSpace::kTransform);
    MVector scalePivotTranslate = fnXform.scalePivotTranslation(MSpace::kTransform);

    MTransformationMatrix::RotationOrder order = fnXform.rotationOrder();
    double radians[3];
    fnXform.getRotation(radians, order);

    double scales[3];
    fnXform.getScale(scales);

    dst->setTranslation(Vector3F(translation.x, translation.y, translation.z));
    dst->setRotationAngles(Vector3F(radians[0], radians[1], radians[2]));
    dst->setScale(Vector3F(scales[0], scales[1], scales[2]));
    dst->setRotationOrder(GetRotationOrder(order));
    dst->setRotatePivot(Vector3F(rotatePivot.x, rotatePivot.y, rotatePivot.z),
                        Vector3F(rotatePivotTranslate.x, rotatePivotTranslate.y, rotatePivotTranslate.z));
    dst->setScalePivot(Vector3F(scalePivot.x, scalePivot.y, scalePivot.z),
                       Vector3F(scalePivotTranslate.x, scalePivotTranslate.y, scalePivotTranslate.z));
    return true;
}
#define DEF_SURFACE_LIST_HEIGHT 8 static struct st_controls { LWControl *save_file, *program_dir, *border, *convert_to_triangles, *surface_list, *shaderselect_button; }controls; #define MAX_SURFACE_NAME 256 const char * snslb_columns[] = {"Surface", "Shader" }; const int snslb_columnwidths[] = {140, 140 }; const std::string newShaderOpt("<new>"); const char * shader_delimiter = ";\n"; const char * def_vs = "void main()\n \ {\n \ gl_FrontColor = gl_Color;\n \ gl_Position = ftransform();\n \ }"; const char * def_fs = "void main()\n \ {\n \ gl_FragColor = gl_Color;\n \ }"; CMeshExport * meshExport; // Required by macros... static LWPanControlDesc desc; static LWValue ival = { LWT_INTEGER }, ivecval = { LWT_VINT }, fval = { LWT_FLOAT }, fvecval = { LWT_VFLOAT }, sval = { LWT_STRING };
// Minimal vertex shader: fixed-function position transform only.
void main()
{
    gl_Position = ftransform();
}
// Pass-through vertex shader that also runs the incoming texture
// coordinates through texture unit 0's matrix.
void main()
{
    gl_Position = ftransform();
    gl_TexCoord[0] = gl_TextureMatrix[0] * gl_MultiTexCoord0;
}
void EarthquakeSet::initContext(GLContextData& contextData) const { /* Create a context data item and store it in the context: */ DataItem* dataItem=new DataItem; contextData.addDataItem(this,dataItem); if(dataItem->vertexBufferObjectId>0) { typedef GLGeometry::Vertex<float,2,GLubyte,4,void,float,3> Vertex; /* Create a vertex buffer object to store the events: */ glBindBufferARB(GL_ARRAY_BUFFER_ARB,dataItem->vertexBufferObjectId); glBufferDataARB(GL_ARRAY_BUFFER_ARB,events.size()*sizeof(Vertex),0,GL_STATIC_DRAW_ARB); Vertex* vPtr=static_cast<Vertex*>(glMapBufferARB(GL_ARRAY_BUFFER_ARB,GL_WRITE_ONLY_ARB)); int numPoints=int(events.size()); for(int i=0;i<numPoints;++i,++vPtr) { /* Get a reference to the event in kd-tree order: */ const Event& e=events[treePointIndices[i]]; /* Copy the event's time: */ vPtr->texCoord[0]=Vertex::TexCoord::Scalar(e.magnitude)-4.0f; vPtr->texCoord[1]=Vertex::TexCoord::Scalar(e.time); /* Map the event's magnitude to color: */ float magnitudeMin=5.0f; float magnitudeMax=9.0f; const int numColors=5; static const Vertex::Color magColorMap[numColors]= { Vertex::Color(0,255,0), Vertex::Color(0,255,255), Vertex::Color(0,0,255), Vertex::Color(255,0,255), Vertex::Color(255,0,0) }; if(e.magnitude<=magnitudeMin) vPtr->color=magColorMap[0]; else if(e.magnitude>=magnitudeMax) vPtr->color=magColorMap[numColors-1]; else { int baseIndex=int(e.magnitude-magnitudeMin); float weight=(e.magnitude-magnitudeMin)-float(baseIndex); for(int i=0;i<4;++i) vPtr->color[i]=GLubyte(float(magColorMap[baseIndex][i])*(1.0f-weight)+float(magColorMap[baseIndex+1][i]*weight)+0.5f); } /* Copy the event's position: */ vPtr->position=e.position; } glUnmapBufferARB(GL_ARRAY_BUFFER_ARB); /* Protect the vertex buffer object: */ glBindBufferARB(GL_ARRAY_BUFFER_ARB,0); } if(dataItem->pointRenderer!=0) { /* Create the point rendering shader: */ static const char* vertexProgram="\ uniform float scaledPointRadius; \ uniform float highlightTime; \ uniform float currentTime; \ uniform 
vec3 frontSphereCenter; \ uniform float frontSphereRadius2; \ uniform bool frontSphereTest; \ \ void main() \ { \ /* Check if the point is inside the front sphere: */ \ bool valid=dot(gl_Vertex-frontSphereCenter,gl_Vertex-frontSphereCenter)>=frontSphereRadius2; \ if(frontSphereTest) \ valid=!valid; \ if(valid) \ { \ /* Transform the vertex to eye coordinates: */ \ vec4 vertexEye=gl_ModelViewMatrix*gl_Vertex; \ \ /* Calculate point size based on vertex' eye distance along z direction: */ \ float pointSize=scaledPointRadius*2.0*vertexEye.w/vertexEye.z; \ pointSize*=gl_MultiTexCoord0.x; \ \ /* Adapt point size based on current time and time scale: */ \ float highlightFactor=gl_MultiTexCoord0.y-(currentTime-highlightTime); \ if(highlightFactor>0.0&&highlightFactor<=highlightTime) \ pointSize*=2.0*highlightFactor/highlightTime+1.0; \ \ /* Set point size: */ \ gl_PointSize=pointSize; \ \ /* Use standard color: */ \ gl_FrontColor=gl_Color; \ } \ else \ { \ /* Set point size to zero and color to invisible: */ \ gl_PointSize=0.0; \ gl_FrontColor=vec4(0.0,0.0,0.0,0.0); \ } \ \ /* Use standard vertex position for fragment generation: */ \ gl_Position=ftransform(); \ }"; static const char* fragmentProgram="\ uniform sampler2D pointTexture; \ \ void main() \ { \ gl_FragColor=texture2D(pointTexture,gl_TexCoord[0].xy)*gl_Color; \ }";
void main() \n\ { \n\ vec4 PosV = ftransform(); \n\ gl_Position = PosV.xyww; \n\ PosL = gl_Vertex.xyz; \n\ }";
// Full-screen-quad vertex shader: remaps vertex xy from [0,1] into
// texture coordinates in [-1,1], flipping the t axis.
void main()
{
    vec2 st = vec2(gl_Vertex.xy);
    gl_TexCoord[0].st = st * vec2(2.0, -2.0) + vec2(-1.0, 1.0);
    gl_Position = ftransform();
}
// Transforms the vertex and derives a [0,1] uv from the clip-space xy
// (valid when w == 1, e.g. for a full-screen quad).
void main()
{
    gl_Position = ftransform();
    uv = (gl_Position.xy + vec2(1, 1)) / 2.0;
}
void main() {\n\ gl_TexCoord[0] = gl_MultiTexCoord0;\n\ gl_Position = ftransform();\n\ }\n";
// Forwards the unit-0 texture coordinate of each vertex to the fragment
// stage and applies the fixed-function transform.
void main()
{
    gl_Position = ftransform();
    coords = gl_MultiTexCoord0.xy;  // interpolated texcoord for the fragment shader
}