pARk/ARView.m

/*
     File: ARView.m
 Abstract: Augmented reality view. Displays a live camera feed with the specified places-of-interest overlaid at the correct positions based on the direction the user is looking. Uses Core Location to determine the user's location relative to the places-of-interest and Core Motion to determine the direction the user is looking.
  Version: 1.0
 
 Disclaimer: IMPORTANT:  This Apple software is supplied to you by Apple
 Inc. ("Apple") in consideration of your agreement to the following
 terms, and your use, installation, modification or redistribution of
 this Apple software constitutes acceptance of these terms.  If you do
 not agree with these terms, please do not use, install, modify or
 redistribute this Apple software.
 
 In consideration of your agreement to abide by the following terms, and
 subject to these terms, Apple grants you a personal, non-exclusive
 license, under Apple's copyrights in this original Apple software (the
 "Apple Software"), to use, reproduce, modify and redistribute the Apple
 Software, with or without modifications, in source and/or binary forms;
 provided that if you redistribute the Apple Software in its entirety and
 without modifications, you must retain this notice and the following
 text and disclaimers in all such redistributions of the Apple Software.
 Neither the name, trademarks, service marks or logos of Apple Inc. may
 be used to endorse or promote products derived from the Apple Software
 without specific prior written permission from Apple.  Except as
 expressly stated in this notice, no other rights or licenses, express or
 implied, are granted by Apple herein, including but not limited to any
 patent rights that may be infringed by your derivative works or by other
 works in which the Apple Software may be incorporated.
 
 The Apple Software is provided by Apple on an "AS IS" basis.  APPLE
 MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION
 THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS
 FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND
 OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS.
 
 IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL
 OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION,
 MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED
 AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
 STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
 POSSIBILITY OF SUCH DAMAGE.
 
 Copyright (C) 2012 Apple Inc. All Rights Reserved.
 
 */
 
#import "ARView.h"
#import "PlaceOfInterest.h"
 
#import <AVFoundation/AVFoundation.h>
 
#pragma mark -
#pragma mark Math utilities declaration
 
#define DEGREES_TO_RADIANS (M_PI/180.0)
 
typedef float mat4f_t[16];  // 4x4 matrix in column major order
typedef float vec4f_t[4];   // 4D vector
 
// Creates a projection matrix using the given y-axis field-of-view, aspect ratio, and near and far clipping planes
void createProjectionMatrix(mat4f_t mout, float fovy, float aspect, float zNear, float zFar);
 
// Matrix-vector and matrix-matrix multiplication routines
void multiplyMatrixAndVector(vec4f_t vout, const mat4f_t m, const vec4f_t v);
void multiplyMatrixAndMatrix(mat4f_t c, const mat4f_t a, const mat4f_t b);
 
// Initialize mout to be a 4x4 affine transform corresponding to the rotation specified by m
void transformFromCMRotationMatrix(mat4f_t mout, const CMRotationMatrix *m);
 
#pragma mark -
#pragma mark Geodetic utilities declaration
 
#define WGS84_A (6378137.0)             // WGS 84 semi-major axis constant in meters
#define WGS84_E (8.1819190842622e-2)    // WGS 84 eccentricity
 
// Converts latitude, longitude to ECEF coordinate system
void latLonToEcef(double lat, double lon, double alt, double *x, double *y, double *z);
 
// Converts ECEF to local East-North-Up (ENU) coordinates centered at the given lat, lon
void ecefToEnu(double lat, double lon, double x, double y, double z, double xr, double yr, double zr, double *e, double *n, double *u);
 
#pragma mark -
#pragma mark ARView extension
 
@interface ARView () {
    UIView *captureView;
    AVCaptureSession *captureSession;
    AVCaptureVideoPreviewLayer *captureLayer;
    
    CADisplayLink *displayLink;
    CMMotionManager *motionManager;
    CLLocationManager *locationManager;
    CLLocation *location;
    NSArray *placesOfInterest;
    mat4f_t projectionTransform;
    mat4f_t cameraTransform;    
    vec4f_t *placesOfInterestCoordinates;
}
 
- (void)initialize;
 
- (void)startCameraPreview;
- (void)stopCameraPreview;
 
- (void)startLocation;
- (void)stopLocation;
 
- (void)startDeviceMotion;
- (void)stopDeviceMotion;
 
- (void)startDisplayLink;
- (void)stopDisplayLink;
 
- (void)updatePlacesOfInterestCoordinates;
 
- (void)onDisplayLink:(id)sender;
- (void)locationManager:(CLLocationManager *)manager didUpdateToLocation:(CLLocation *)newLocation fromLocation:(CLLocation *)oldLocation;
 
@end
 
 
#pragma mark -
#pragma mark ARView implementation
 
@implementation ARView
 
@dynamic placesOfInterest;
 
- (void)dealloc
{
    [self stop];
    [placesOfInterest release];
    [location release];
    [captureView removeFromSuperview];
    [captureView release];
    if (placesOfInterestCoordinates != NULL) {
        free(placesOfInterestCoordinates);
    }
    [super dealloc];
}
 
- (void)start
{
    [self startCameraPreview];
    [self startLocation];
    [self startDeviceMotion];
    [self startDisplayLink];
}
 
- (void)stop
{
    [self stopCameraPreview];
    [self stopLocation];
    [self stopDeviceMotion];
    [self stopDisplayLink];
}
 
- (void)setPlacesOfInterest:(NSArray *)pois
{
    for (PlaceOfInterest *poi in [placesOfInterest objectEnumerator]) {
        [poi.view removeFromSuperview];
    }   
    [placesOfInterest release];
    
    placesOfInterest = [pois retain];   
    if (location != nil) {
        [self updatePlacesOfInterestCoordinates];
    }
}
 
- (NSArray *)placesOfInterest
{
    return placesOfInterest;
}
 
- (void)initialize
{
    captureView = [[UIView alloc] initWithFrame:self.bounds];
    captureView.bounds = self.bounds;
    [self addSubview:captureView];
    [self sendSubviewToBack:captureView];
    
    // Initialize projection matrix 
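    // The hard-coded values below are an assumption for this sample: a 60-degree vertical field of view
    // (roughly that of the iPhone's back camera in portrait), with near and far clipping planes at
    // 0.25 m and 1000 m respectively.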
    createProjectionMatrix(projectionTransform, 60.0f*DEGREES_TO_RADIANS, self.bounds.size.width*1.0f / self.bounds.size.height, 0.25f, 1000.0f);
}
 
- (void)startCameraPreview
{   
    AVCaptureDevice* camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    if (camera == nil) {
        return;
    }
    
    captureSession = [[AVCaptureSession alloc] init];
    AVCaptureDeviceInput *newVideoInput = [[[AVCaptureDeviceInput alloc] initWithDevice:camera error:nil] autorelease];
    [captureSession addInput:newVideoInput];
    
    captureLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
    captureLayer.frame = captureView.bounds;
    [captureLayer setOrientation:AVCaptureVideoOrientationPortrait];
    [captureLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];
    [captureView.layer addSublayer:captureLayer];
    
    // Start the session. This is done asynchronously because -startRunning doesn't return until the session is running.
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        [captureSession startRunning];
    });
}
 
- (void)stopCameraPreview
{   
    [captureSession stopRunning];
    [captureLayer removeFromSuperlayer];
    [captureSession release];
    [captureLayer release];
    captureSession = nil;
    captureLayer = nil;
}
 
- (void)startLocation
{
    [locationManager release];
    locationManager = [[CLLocationManager alloc] init];
    locationManager.delegate = self;
    locationManager.distanceFilter = 100.0;
    [locationManager startUpdatingLocation];
}
 
- (void)stopLocation
{
    [locationManager stopUpdatingLocation];
    [locationManager release];
    locationManager = nil;
}
 
- (void)startDeviceMotion
{   
    motionManager = [[CMMotionManager alloc] init];
    
    // Tell CoreMotion to show the compass calibration HUD when required to provide true north-referenced attitude
    motionManager.showsDeviceMovementDisplay = YES;
    
    motionManager.deviceMotionUpdateInterval = 1.0 / 60.0;
    
    // New in iOS 5.0: Attitude that is referenced to true north
    [motionManager startDeviceMotionUpdatesUsingReferenceFrame:CMAttitudeReferenceFrameXTrueNorthZVertical];
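    // With CMAttitudeReferenceFrameXTrueNorthZVertical, an identity attitude corresponds to the device's
    // X axis pointing toward true north and its Z axis pointing straight up. Using this reference frame
    // requires location availability so Core Motion can correct magnetic heading to true north.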
}
 
- (void)stopDeviceMotion
{
    [motionManager stopDeviceMotionUpdates];
    [motionManager release];
    motionManager = nil;
}
 
- (void)startDisplayLink
{
    displayLink = [[CADisplayLink displayLinkWithTarget:self selector:@selector(onDisplayLink:)] retain];
    [displayLink setFrameInterval:1];
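    // A frame interval of 1 fires -onDisplayLink: on every display refresh (typically 60 Hz),
    // matching the device motion update interval above.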
    [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
 
- (void)stopDisplayLink
{
    [displayLink invalidate];
    [displayLink release];
    displayLink = nil;      
}
 
- (void)updatePlacesOfInterestCoordinates
{
    
    if (placesOfInterestCoordinates != NULL) {
        free(placesOfInterestCoordinates);
    }
    placesOfInterestCoordinates = (vec4f_t *)malloc(sizeof(vec4f_t)*placesOfInterest.count);
            
    int i = 0;
    
    double myX, myY, myZ;
    latLonToEcef(location.coordinate.latitude, location.coordinate.longitude, 0.0, &myX, &myY, &myZ);
 
    // Array of NSData instances, each wrapping a DistanceAndIndex struct that holds the distance to a
    // POI and that POI's index in placesOfInterest.
    // Used below to add the POI views in the proper Z-order.
    typedef struct {
        float distance;
        int index;
    } DistanceAndIndex;
    NSMutableArray *orderedDistances = [NSMutableArray arrayWithCapacity:placesOfInterest.count];
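    // NSMutableArray can only hold objects, so each DistanceAndIndex struct is boxed in an NSData below.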
 
    // Compute the world coordinates of each place-of-interest
    for (PlaceOfInterest *poi in [[self placesOfInterest] objectEnumerator]) {
        double poiX, poiY, poiZ, e, n, u;
        
        latLonToEcef(poi.location.coordinate.latitude, poi.location.coordinate.longitude, 0.0, &poiX, &poiY, &poiZ);
        ecefToEnu(location.coordinate.latitude, location.coordinate.longitude, myX, myY, myZ, poiX, poiY, poiZ, &e, &n, &u);
        
        placesOfInterestCoordinates[i][0] = (float)n;
        placesOfInterestCoordinates[i][1] = -(float)e;
        placesOfInterestCoordinates[i][2] = 0.0f;
        placesOfInterestCoordinates[i][3] = 1.0f;
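        // Map ENU into the world frame used for rendering: with the X-true-north / Z-vertical attitude
        // reference frame, x points north and y points west, so x = n and y = -e. Altitude differences
        // are ignored (z = 0), placing every POI at the user's height.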
        
        // Add struct containing distance and index to orderedDistances
        DistanceAndIndex distanceAndIndex;
        distanceAndIndex.distance = sqrtf(n*n + e*e);
        distanceAndIndex.index = i;
        [orderedDistances insertObject:[NSData dataWithBytes:&distanceAndIndex length:sizeof(distanceAndIndex)] atIndex:i++];
    }
    
    // Sort orderedDistances in ascending order based on distance from the user
    [orderedDistances sortUsingComparator:^NSComparisonResult(NSData *a, NSData *b) {
        const DistanceAndIndex *aData = (const DistanceAndIndex *)a.bytes;
        const DistanceAndIndex *bData = (const DistanceAndIndex *)b.bytes;
        if (aData->distance < bData->distance) {
            return NSOrderedAscending;
        } else if (aData->distance > bData->distance) {
            return NSOrderedDescending;
        } else {
            return NSOrderedSame;
        }
    }];
    
    // Add subviews in descending Z-order so they overlap properly
    for (NSData *d in [orderedDistances reverseObjectEnumerator]) {
        const DistanceAndIndex *distanceAndIndex = (const DistanceAndIndex *)d.bytes;
        PlaceOfInterest *poi = (PlaceOfInterest *)[placesOfInterest objectAtIndex:distanceAndIndex->index];     
        [self addSubview:poi.view];
    }   
}
 
- (void)onDisplayLink:(id)sender
{
    CMDeviceMotion *d = motionManager.deviceMotion;
    if (d != nil) {
        CMRotationMatrix r = d.attitude.rotationMatrix;
        transformFromCMRotationMatrix(cameraTransform, &r);
        [self setNeedsDisplay];
    }
}
 
- (void)drawRect:(CGRect)rect
{
    if (placesOfInterestCoordinates == NULL) {
        return;
    }
    
    mat4f_t projectionCameraTransform;
    multiplyMatrixAndMatrix(projectionCameraTransform, projectionTransform, cameraTransform);
    
    int i = 0;
    for (PlaceOfInterest *poi in [placesOfInterest objectEnumerator]) {
        vec4f_t v;
        multiplyMatrixAndVector(v, projectionCameraTransform, placesOfInterestCoordinates[i]);
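        // v is now in homogeneous clip coordinates. The perspective divide by v[3] yields normalized
        // device coordinates in [-1, 1], which are remapped to [0, 1] below and then scaled to view
        // coordinates. The sign test on v[2] is this sample's way of culling POIs that lie behind
        // the camera, so their views are hidden rather than mirrored onto the screen.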
        
        float x = (v[0] / v[3] + 1.0f) * 0.5f;
        float y = (v[1] / v[3] + 1.0f) * 0.5f;
        if (v[2] < 0.0f) {
            poi.view.center = CGPointMake(x*self.bounds.size.width, self.bounds.size.height-y*self.bounds.size.height);
            poi.view.hidden = NO;
        } else {
            poi.view.hidden = YES;
        }
        i++;
    }
 
}
 
- (void)locationManager:(CLLocationManager *)manager didUpdateToLocation:(CLLocation *)newLocation fromLocation:(CLLocation *)oldLocation
{
    [location release];
    location = [newLocation retain];
    if (placesOfInterest != nil) {
        [self updatePlacesOfInterestCoordinates];
    }   
}
 
- (id)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if (self) {
        [self initialize];
    }
    return self;
}
 
- (id)initWithCoder:(NSCoder *)aDecoder
{
    self = [super initWithCoder:aDecoder];
    if (self) {
        [self initialize];
    }
    return self;
}
 
@end
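 
/*
 Usage sketch (not part of the original sample): a host view controller might drive ARView roughly as
 follows. The PlaceOfInterest construction shown is an assumption based on the properties used in this
 file (view, location); see PlaceOfInterest.h for the actual interface. Manual retain/release is elided.
 
     ARView *arView = [[ARView alloc] initWithFrame:self.view.bounds];
     
     UILabel *label = [[UILabel alloc] initWithFrame:CGRectMake(0.0f, 0.0f, 100.0f, 20.0f)];
     label.text = @"Golden Gate Bridge";
     CLLocation *location = [[CLLocation alloc] initWithLatitude:37.8199 longitude:-122.4783];
     PlaceOfInterest *poi = [PlaceOfInterest placeOfInterestWithView:label at:location];  // assumed convenience constructor
     
     arView.placesOfInterest = [NSArray arrayWithObject:poi];
     [self.view addSubview:arView];
     [arView start];   // begins camera preview, location, device motion, and display link updates
     // ...
     [arView stop];    // stop updates when the view goes offscreen
 */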
 
#pragma mark -
#pragma mark Math utilities definition
 
// Creates a projection matrix using the given y-axis field-of-view, aspect ratio, and near and far clipping planes
void createProjectionMatrix(mat4f_t mout, float fovy, float aspect, float zNear, float zFar)
{
    float f = 1.0f / tanf(fovy/2.0f);
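    // The matrix built below is the standard OpenGL-style perspective projection (as produced by
    // gluPerspective), stored in column-major order.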
    
    mout[0] = f / aspect;
    mout[1] = 0.0f;
    mout[2] = 0.0f;
    mout[3] = 0.0f;
    
    mout[4] = 0.0f;
    mout[5] = f;
    mout[6] = 0.0f;
    mout[7] = 0.0f;
    
    mout[8] = 0.0f;
    mout[9] = 0.0f;
    mout[10] = (zFar+zNear) / (zNear-zFar);
    mout[11] = -1.0f;
    
    mout[12] = 0.0f;
    mout[13] = 0.0f;
    mout[14] = 2 * zFar * zNear /  (zNear-zFar);
    mout[15] = 0.0f;
}
 
// Matrix-vector and matrix-matrix multiplication routines
void multiplyMatrixAndVector(vec4f_t vout, const mat4f_t m, const vec4f_t v)
{
    vout[0] = m[0]*v[0] + m[4]*v[1] + m[8]*v[2] + m[12]*v[3];
    vout[1] = m[1]*v[0] + m[5]*v[1] + m[9]*v[2] + m[13]*v[3];
    vout[2] = m[2]*v[0] + m[6]*v[1] + m[10]*v[2] + m[14]*v[3];
    vout[3] = m[3]*v[0] + m[7]*v[1] + m[11]*v[2] + m[15]*v[3];
}
 
void multiplyMatrixAndMatrix(mat4f_t c, const mat4f_t a, const mat4f_t b)
{
    uint8_t col, row, i;
    memset(c, 0, 16*sizeof(float));
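    // Column-major indexing: element (row, col) of a 4x4 matrix lives at index col*4 + row, so the
    // loop below accumulates c = a * b.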
    
    for (col = 0; col < 4; col++) {
        for (row = 0; row < 4; row++) {
            for (i = 0; i < 4; i++) {
                c[col*4+row] += a[i*4+row]*b[col*4+i];
            }
        }
    }
}
 
// Initialize mout to be a 4x4 affine transform corresponding to the rotation specified by m
void transformFromCMRotationMatrix(mat4f_t mout, const CMRotationMatrix *m)
{
    mout[0] = (float)m->m11;
    mout[1] = (float)m->m21;
    mout[2] = (float)m->m31;
    mout[3] = 0.0f;
    
    mout[4] = (float)m->m12;
    mout[5] = (float)m->m22;
    mout[6] = (float)m->m32;
    mout[7] = 0.0f;
    
    mout[8] = (float)m->m13;
    mout[9] = (float)m->m23;
    mout[10] = (float)m->m33;
    mout[11] = 0.0f;
    
    mout[12] = 0.0f;
    mout[13] = 0.0f;
    mout[14] = 0.0f;
    mout[15] = 1.0f;
}
 
#pragma mark -
#pragma mark Geodetic utilities definition
 
// Standard geodetic-to-ECEF and ECEF-to-ENU conversion formulas; derivations can be found in any geodesy reference.
 
// Converts latitude, longitude to ECEF coordinate system
void latLonToEcef(double lat, double lon, double alt, double *x, double *y, double *z)
{   
    double clat = cos(lat * DEGREES_TO_RADIANS);
    double slat = sin(lat * DEGREES_TO_RADIANS);
    double clon = cos(lon * DEGREES_TO_RADIANS);
    double slon = sin(lon * DEGREES_TO_RADIANS);
    
    double N = WGS84_A / sqrt(1.0 - WGS84_E * WGS84_E * slat * slat);
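    // N is the prime vertical radius of curvature of the WGS 84 ellipsoid at this latitude;
    // alt is the height above the ellipsoid in meters.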
    
    *x = (N + alt) * clat * clon;
    *y = (N + alt) * clat * slon;
    *z = (N * (1.0 - WGS84_E * WGS84_E) + alt) * slat;
}
 
// Converts ECEF to local East-North-Up (ENU) coordinates centered at the given lat, lon
void ecefToEnu(double lat, double lon, double x, double y, double z, double xr, double yr, double zr, double *e, double *n, double *u)
{
    double clat = cos(lat * DEGREES_TO_RADIANS);
    double slat = sin(lat * DEGREES_TO_RADIANS);
    double clon = cos(lon * DEGREES_TO_RADIANS);
    double slon = sin(lon * DEGREES_TO_RADIANS);
    double dx = x - xr;
    double dy = y - yr;
    double dz = z - zr;
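    // Rotate the ECEF offset (dx, dy, dz) from the reference point into the local East-North-Up
    // tangent-plane frame at (lat, lon).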
    
    *e = -slon*dx  + clon*dy;
    *n = -slat*clon*dx - slat*slon*dy + clat*dz;
    *u = clat*clon*dx + clat*slon*dy + slat*dz;
}