GLCameraRipple/RippleViewController.m
/*
 File: RippleViewController.m
 
 Abstract: View controller that handles camera, drawing, and touch events.
 
 Version: 1.0
 
 Disclaimer: IMPORTANT: This Apple software is supplied to you by Apple
 Inc. ("Apple") in consideration of your agreement to the following
 terms, and your use, installation, modification or redistribution of
 this Apple software constitutes acceptance of these terms. If you do
 not agree with these terms, please do not use, install, modify or
 redistribute this Apple software.
 
 In consideration of your agreement to abide by the following terms, and
 subject to these terms, Apple grants you a personal, non-exclusive
 license, under Apple's copyrights in this original Apple software (the
 "Apple Software"), to use, reproduce, modify and redistribute the Apple
 Software, with or without modifications, in source and/or binary forms;
 provided that if you redistribute the Apple Software in its entirety and
 without modifications, you must retain this notice and the following
 text and disclaimers in all such redistributions of the Apple Software.
 
 Neither the name, trademarks, service marks or logos of Apple Inc. may
 be used to endorse or promote products derived from the Apple Software
 without specific prior written permission from Apple. Except as
 expressly stated in this notice, no other rights or licenses, express or
 implied, are granted by Apple herein, including but not limited to any
 patent rights that may be infringed by your derivative works or by other
 works in which the Apple Software may be incorporated.
 
 The Apple Software is provided by Apple on an "AS IS" basis. APPLE
 MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
 THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
 FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
 OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
 
 IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
 OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
 MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
 AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
 STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
 POSSIBILITY OF SUCH DAMAGE.
 
 Copyright (C) 2013 Apple Inc. All Rights Reserved.
 */
#import <CoreVideo/CVOpenGLESTextureCache.h>

#import "RippleViewController.h"
#import "RippleModel.h"

// Uniform index.
enum
{
    UNIFORM_Y,
    UNIFORM_UV,
    NUM_UNIFORMS
};
GLint uniforms[NUM_UNIFORMS];

// Attribute index.
enum
{
    ATTRIB_VERTEX,
    ATTRIB_TEXCOORD,
    NUM_ATTRIBUTES
};
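
// The attribute indices above are bound to the "position" and "texCoord"
// attributes of the vertex shader in -loadShaders (glBindAttribLocation),
// and are reused in -setupBuffers when the vertex arrays are enabled.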
@interface RippleViewController () {
    GLuint _program;
    
    GLuint _positionVBO;
    GLuint _texcoordVBO;
    GLuint _indexVBO;
    
    CGFloat _screenWidth;
    CGFloat _screenHeight;
    size_t _textureWidth;
    size_t _textureHeight;
    
    unsigned int _meshFactor;
    
    EAGLContext *_context;
    RippleModel *_ripple;
    
    CVOpenGLESTextureRef _lumaTexture;
    CVOpenGLESTextureRef _chromaTexture;
    
    NSString *_sessionPreset;
    AVCaptureSession *_session;
    CVOpenGLESTextureCacheRef _videoTextureCache;
}

- (void)cleanUpTextures;

- (void)setupAVCapture;
- (void)tearDownAVCapture;

- (void)setupBuffers;
- (void)setupGL;
- (void)tearDownGL;

- (BOOL)loadShaders;
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file;
- (BOOL)linkProgram:(GLuint)prog;

@end
@implementation RippleViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
    
    _context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    
    if (!_context) {
        NSLog(@"Failed to create ES context");
    }
    
    GLKView *view = (GLKView *)self.view;
    view.context = _context;
    
    self.preferredFramesPerSecond = 60;
    
    _screenWidth = [UIScreen mainScreen].bounds.size.width;
    _screenHeight = [UIScreen mainScreen].bounds.size.height;
    view.contentScaleFactor = [UIScreen mainScreen].scale;
    
    if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad)
    {
        // meshFactor controls the resolution of the ripple mesh:
        // mesh width = screenWidth / meshFactor.
        // It's chosen based on both screen resolution and device size.
        _meshFactor = 8;
        
        // Choose a bigger capture preset for the bigger screen.
        _sessionPreset = AVCaptureSessionPreset1280x720;
    }
    else
    {
        _meshFactor = 4;
        _sessionPreset = AVCaptureSessionPreset640x480;
    }
    
    [self setupGL];
    [self setupAVCapture];
}
- (void)viewDidUnload
{
    [super viewDidUnload];
    
    [self tearDownAVCapture];
    [self tearDownGL];
    
    if ([EAGLContext currentContext] == _context) {
        [EAGLContext setCurrentContext:nil];
    }
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Release any cached data, images, etc. that aren't in use.
}

- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation
{
    // Camera image orientation on screen is fixed
    // with respect to the physical camera orientation.
    if (interfaceOrientation == UIInterfaceOrientationPortrait)
        return YES;
    else
        return NO;
}
- (void)cleanUpTextures
{
    if (_lumaTexture)
    {
        CFRelease(_lumaTexture);
        _lumaTexture = NULL;
    }
    
    if (_chromaTexture)
    {
        CFRelease(_chromaTexture);
        _chromaTexture = NULL;
    }
    
    // Periodic texture cache flush every frame
    CVOpenGLESTextureCacheFlush(_videoTextureCache, 0);
}
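
// AVCaptureVideoDataOutputSampleBufferDelegate callback. Each camera frame
// arrives as a bi-planar YUV pixel buffer (see -setupAVCapture); the Y and
// CbCr planes are wrapped in OpenGL ES textures via the texture cache and
// bound to texture units 0 and 1 for the shader to sample.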
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVReturn err;
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    
    size_t width = CVPixelBufferGetWidth(pixelBuffer);
    size_t height = CVPixelBufferGetHeight(pixelBuffer);
    
    if (!_videoTextureCache)
    {
        NSLog(@"No video texture cache");
        return;
    }
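    
    // (Re)create the ripple model and GL buffers whenever the incoming frame
    // dimensions change (first frame, or a different session preset).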
    if (_ripple == nil ||
        width != _textureWidth ||
        height != _textureHeight)
    {
        _textureWidth = width;
        _textureHeight = height;
        
        _ripple = [[RippleModel alloc] initWithScreenWidth:_screenWidth
                                              screenHeight:_screenHeight
                                                meshFactor:_meshFactor
                                               touchRadius:5
                                              textureWidth:_textureWidth
                                             textureHeight:_textureHeight];
        
        [self setupBuffers];
    }
    
    [self cleanUpTextures];
    
    // CVOpenGLESTextureCacheCreateTextureFromImage will create GLES textures
    // optimally from a CVImageBufferRef.
    
    // Y-plane
    glActiveTexture(GL_TEXTURE0);
    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                       _videoTextureCache,
                                                       pixelBuffer,
                                                       NULL,
                                                       GL_TEXTURE_2D,
                                                       GL_RED_EXT,
                                                       _textureWidth,
                                                       _textureHeight,
                                                       GL_RED_EXT,
                                                       GL_UNSIGNED_BYTE,
                                                       0,
                                                       &_lumaTexture);
    if (err)
    {
        NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
    }
    
    glBindTexture(CVOpenGLESTextureGetTarget(_lumaTexture), CVOpenGLESTextureGetName(_lumaTexture));
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    
    // UV-plane (half the luma dimensions because of 4:2:0 chroma subsampling)
    glActiveTexture(GL_TEXTURE1);
    err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                       _videoTextureCache,
                                                       pixelBuffer,
                                                       NULL,
                                                       GL_TEXTURE_2D,
                                                       GL_RG_EXT,
                                                       _textureWidth / 2,
                                                       _textureHeight / 2,
                                                       GL_RG_EXT,
                                                       GL_UNSIGNED_BYTE,
                                                       1,
                                                       &_chromaTexture);
    if (err)
    {
        NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
    }
    
    glBindTexture(CVOpenGLESTextureGetTarget(_chromaTexture), CVOpenGLESTextureGetName(_chromaTexture));
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
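
// Set up the capture pipeline: create the Core Video texture cache for the
// GL context, then an AVCaptureSession with the default video device as input
// and an AVCaptureVideoDataOutput (bi-planar YUV 4:2:0) as output, delivering
// sample buffers to this controller on the main queue.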
- (void)setupAVCapture
{
    //-- Create CVOpenGLESTextureCacheRef for optimal CVImageBufferRef to GLES texture conversion.
#if COREVIDEO_USE_EAGLCONTEXT_CLASS_IN_API
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, _context, NULL, &_videoTextureCache);
#else
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)_context, NULL, &_videoTextureCache);
#endif
    if (err)
    {
        NSLog(@"Error at CVOpenGLESTextureCacheCreate %d", err);
        return;
    }
    
    //-- Set up the capture session.
    _session = [[AVCaptureSession alloc] init];
    [_session beginConfiguration];
    
    //-- Set preset session size.
    [_session setSessionPreset:_sessionPreset];
    
    //-- Create a video device and an input from that device. Add the input to the capture session.
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (videoDevice == nil)
        assert(0);
    
    //-- Add the device to the session.
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
    if (error)
        assert(0);
    
    [_session addInput:input];
    
    //-- Create the output for the capture session.
    AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    [dataOutput setAlwaysDiscardsLateVideoFrames:YES]; // Probably want to set this to NO when recording
    
    //-- Set to YUV420.
    [dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange]
                                                             forKey:(id)kCVPixelBufferPixelFormatTypeKey]]; // Necessary for manual preview
    
    // Dispatch on the main queue so OpenGL can work with the data.
    [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
    
    [_session addOutput:dataOutput];
    [_session commitConfiguration];
    
    [_session startRunning];
}

- (void)tearDownAVCapture
{
    [self cleanUpTextures];
    
    CFRelease(_videoTextureCache);
}
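
// Create the vertex/index buffers for the ripple mesh. The index and position
// buffers are static; the texture-coordinate buffer is dynamic and is
// re-uploaded every frame in -update as the ripple simulation runs.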
- (void)setupBuffers
{
    glGenBuffers(1, &_indexVBO);
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indexVBO);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, [_ripple getIndexSize], [_ripple getIndices], GL_STATIC_DRAW);
    
    glGenBuffers(1, &_positionVBO);
    glBindBuffer(GL_ARRAY_BUFFER, _positionVBO);
    glBufferData(GL_ARRAY_BUFFER, [_ripple getVertexSize], [_ripple getVertices], GL_STATIC_DRAW);
    
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
    
    glGenBuffers(1, &_texcoordVBO);
    glBindBuffer(GL_ARRAY_BUFFER, _texcoordVBO);
    glBufferData(GL_ARRAY_BUFFER, [_ripple getVertexSize], [_ripple getTexCoords], GL_DYNAMIC_DRAW);
    
    glEnableVertexAttribArray(ATTRIB_TEXCOORD);
    glVertexAttribPointer(ATTRIB_TEXCOORD, 2, GL_FLOAT, GL_FALSE, 2 * sizeof(GLfloat), 0);
}
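
// Compile and link the shader program, then point the SamplerY and SamplerUV
// uniforms at texture units 0 and 1, matching the glActiveTexture calls made
// for the luma and chroma planes in the capture callback above.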
- (void)setupGL
{
    [EAGLContext setCurrentContext:_context];
    
    [self loadShaders];
    
    glUseProgram(_program);
    
    glUniform1i(uniforms[UNIFORM_Y], 0);
    glUniform1i(uniforms[UNIFORM_UV], 1);
}

- (void)tearDownGL
{
    [EAGLContext setCurrentContext:_context];
    
    glDeleteBuffers(1, &_positionVBO);
    glDeleteBuffers(1, &_texcoordVBO);
    glDeleteBuffers(1, &_indexVBO);
    
    if (_program) {
        glDeleteProgram(_program);
        _program = 0;
    }
}

#pragma mark - GLKView and GLKViewController delegate methods
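
// GLKViewController drives the frame loop at preferredFramesPerSecond:
// -update advances the ripple simulation, then -glkView:drawInRect: draws it.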
- (void)update
{
    if (_ripple)
    {
        [_ripple runSimulation];
        
        // No need to rebind GL_ARRAY_BUFFER to _texcoordVBO since it should
        // still be bound from setupBuffers.
        glBufferData(GL_ARRAY_BUFFER, [_ripple getVertexSize], [_ripple getTexCoords], GL_DYNAMIC_DRAW);
    }
}

- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
    glClear(GL_COLOR_BUFFER_BIT);
    
    if (_ripple)
    {
        glDrawElements(GL_TRIANGLE_STRIP, [_ripple getIndexCount], GL_UNSIGNED_SHORT, 0);
    }
}

#pragma mark - Touch handling methods
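
// Each touch (begin or move) starts a new ripple in the model at the touched
// location, in view coordinates.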
- (void)myTouch:(NSSet *)touches withEvent:(UIEvent *)event
{
    for (UITouch *touch in touches)
    {
        CGPoint location = [touch locationInView:touch.view];
        [_ripple initiateRippleAtLocation:location];
    }
}

- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    [self myTouch:touches withEvent:event];
}

- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    [self myTouch:touches withEvent:event];
}
#pragma mark - OpenGL ES 2 shader compilation

- (BOOL)loadShaders
{
    GLuint vertShader, fragShader;
    NSString *vertShaderPathname, *fragShaderPathname;
    
    // Create shader program.
    _program = glCreateProgram();
    
    // Create and compile vertex shader.
    vertShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"vsh"];
    if (![self compileShader:&vertShader type:GL_VERTEX_SHADER file:vertShaderPathname]) {
        NSLog(@"Failed to compile vertex shader");
        return NO;
    }
    
    // Create and compile fragment shader.
    fragShaderPathname = [[NSBundle mainBundle] pathForResource:@"Shader" ofType:@"fsh"];
    if (![self compileShader:&fragShader type:GL_FRAGMENT_SHADER file:fragShaderPathname]) {
        NSLog(@"Failed to compile fragment shader");
        return NO;
    }
    
    // Attach vertex shader to program.
    glAttachShader(_program, vertShader);
    
    // Attach fragment shader to program.
    glAttachShader(_program, fragShader);
    
    // Bind attribute locations.
    // This needs to be done prior to linking.
    glBindAttribLocation(_program, ATTRIB_VERTEX, "position");
    glBindAttribLocation(_program, ATTRIB_TEXCOORD, "texCoord");
    
    // Link program.
    if (![self linkProgram:_program]) {
        NSLog(@"Failed to link program: %d", _program);
        
        if (vertShader) {
            glDeleteShader(vertShader);
            vertShader = 0;
        }
        if (fragShader) {
            glDeleteShader(fragShader);
            fragShader = 0;
        }
        if (_program) {
            glDeleteProgram(_program);
            _program = 0;
        }
        
        return NO;
    }
    
    // Get uniform locations.
    uniforms[UNIFORM_Y] = glGetUniformLocation(_program, "SamplerY");
    uniforms[UNIFORM_UV] = glGetUniformLocation(_program, "SamplerUV");
    
    // Release vertex and fragment shaders.
    if (vertShader) {
        glDetachShader(_program, vertShader);
        glDeleteShader(vertShader);
    }
    if (fragShader) {
        glDetachShader(_program, fragShader);
        glDeleteShader(fragShader);
    }
    
    return YES;
}
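
// Reads GLSL source from the given file, compiles it, and returns the new
// shader object through the out-parameter; returns NO on failure.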
- (BOOL)compileShader:(GLuint *)shader type:(GLenum)type file:(NSString *)file
{
    GLint status;
    const GLchar *source;
    
    source = (GLchar *)[[NSString stringWithContentsOfFile:file encoding:NSUTF8StringEncoding error:nil] UTF8String];
    if (!source) {
        NSLog(@"Failed to load shader source from %@", file);
        return NO;
    }
    
    *shader = glCreateShader(type);
    glShaderSource(*shader, 1, &source, NULL);
    glCompileShader(*shader);
    
#if defined(DEBUG)
    GLint logLength;
    glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0) {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetShaderInfoLog(*shader, logLength, &logLength, log);
        NSLog(@"Shader compile log:\n%s", log);
        free(log);
    }
#endif
    
    glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
    if (status == 0) {
        glDeleteShader(*shader);
        return NO;
    }
    
    return YES;
}
- (BOOL)linkProgram:(GLuint)prog
{
    GLint status;
    
    glLinkProgram(prog);
    
#if defined(DEBUG)
    GLint logLength;
    glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0) {
        GLchar *log = (GLchar *)malloc(logLength);
        glGetProgramInfoLog(prog, logLength, &logLength, log);
        NSLog(@"Program link log:\n%s", log);
        free(log);
    }
#endif
    
    glGetProgramiv(prog, GL_LINK_STATUS, &status);
    if (status == 0) {
        return NO;
    }
    
    return YES;
}

@end