📄 oalspatialview.m
字号:
/* File: oalSpatialView.m Abstract: A visual representation of our sound stage. The sound source and listener are represented by images in CALayer objects. The position and rotation of these layers corresponds with their position and rotation within the OpenAL environment. Version: 1.0 Disclaimer: IMPORTANT: This Apple software is supplied to you by Apple Inc. ("Apple") in consideration of your agreement to the following terms, and your use, installation, modification or redistribution of this Apple software constitutes acceptance of these terms. If you do not agree with these terms, please do not use, install, modify or redistribute this Apple software. In consideration of your agreement to abide by the following terms, and subject to these terms, Apple grants you a personal, non-exclusive license, under Apple's copyrights in this original Apple software (the "Apple Software"), to use, reproduce, modify and redistribute the Apple Software, with or without modifications, in source and/or binary forms; provided that if you redistribute the Apple Software in its entirety and without modifications, you must retain this notice and the following text and disclaimers in all such redistributions of the Apple Software. Neither the name, trademarks, service marks or logos of Apple Inc. may be used to endorse or promote products derived from the Apple Software without specific prior written permission from Apple. Except as expressly stated in this notice, no other rights or licenses, express or implied, are granted by Apple herein, including but not limited to any patent rights that may be infringed by your derivative works or by other works in which the Apple Software may be incorporated. The Apple Software is provided by Apple on an "AS IS" basis. 
APPLE MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION THE
IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND OPERATION ALONE
OR IN COMBINATION WITH YOUR PRODUCTS. IN NO EVENT SHALL APPLE BE LIABLE FOR ANY
SPECIAL, INDIRECT, INCIDENTAL OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE,
REPRODUCTION, MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER
CAUSED AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE),
STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.

Copyright (C) 2008 Apple Inc. All Rights Reserved.
*/

#import "oalSpatialView.h"
#import "oalPlayback.h"

/*
 Builds an immutable, closed CGPath outlining `rect` with rounded corners of
 radius `cornerRadius`. The radius is clamped so the corner arcs cannot
 overlap. The caller owns the returned path and must CGPathRelease() it.
*/
CGPathRef CreateRoundedRectPath(CGRect rect, CGFloat cornerRadius)
{
	CGMutablePathRef path = CGPathCreateMutable();

	// Clamp the radius to half of the SHORTER side, the largest radius for
	// which the four corner arcs stay disjoint. (Previously this used MAX,
	// which allowed a self-intersecting path on non-square rects.)
	double maxRad = MIN(CGRectGetHeight(rect) / 2., CGRectGetWidth(rect) / 2.);
	if (cornerRadius > maxRad)
		cornerRadius = maxRad;

	// The four corner points of the rect.
	CGPoint bl, tl, tr, br;
	bl = tl = tr = br = rect.origin;
	tl.y += rect.size.height;
	tr.y += rect.size.height;
	tr.x += rect.size.width;
	br.x += rect.size.width;

	// Trace the outline: an arc around each corner, with straight edges
	// between them; CGPathCloseSubpath supplies the final edge back to the
	// starting point.
	CGPathMoveToPoint(path, NULL, bl.x + cornerRadius, bl.y);
	CGPathAddArcToPoint(path, NULL, bl.x, bl.y, bl.x, bl.y + cornerRadius, cornerRadius);
	CGPathAddLineToPoint(path, NULL, tl.x, tl.y - cornerRadius);
	CGPathAddArcToPoint(path, NULL, tl.x, tl.y, tl.x + cornerRadius, tl.y, cornerRadius);
	CGPathAddLineToPoint(path, NULL, tr.x - cornerRadius, tr.y);
	CGPathAddArcToPoint(path, NULL, tr.x, tr.y, tr.x, tr.y - cornerRadius, cornerRadius);
	CGPathAddLineToPoint(path, NULL, br.x, br.y + cornerRadius);
	CGPathAddArcToPoint(path, NULL, br.x, br.y, br.x - cornerRadius, br.y, cornerRadius);
	CGPathCloseSubpath(path);

	// Hand back an immutable copy so callers can't mutate our scratch path.
	CGPathRef ret = CGPathCreateCopy(path);
	CGPathRelease(path);
	return ret;
}

@implementation 
oalSpatialView

#pragma mark Object Init / Maintenance

// Nib-loading initializer; builds the CALayer scene (speaker, listener,
// instructions) before the view appears.
- (id)initWithCoder:(NSCoder *)coder
{
	if (self = [super initWithCoder:coder]) {
		[self initializeContents];
	}
	return self;
}

- (void)dealloc
{
	// Balance the five KVO registrations made in awakeFromNib.
	[_playback removeObserver:self forKeyPath:@"sourcePos"];
	[_playback removeObserver:self forKeyPath:@"isPlaying"];
	[_playback removeObserver:self forKeyPath:@"listenerPos"];
	[_playback removeObserver:self forKeyPath:@"listenerRotation"];
	[_playback removeObserver:self forKeyPath:@"sourceVolume"];
	// Release the CGImages retained in initializeContents (this file uses
	// manual retain/release, not ARC — note the [super dealloc] below).
	CGImageRelease(_speaker_off);
	CGImageRelease(_speaker_on_0);
	CGImageRelease(_speaker_on_1);
	CGImageRelease(_speaker_on_2);
	CGImageRelease(_speaker_on_3);
	CGImageRelease(_speaker_on_4);
	[super dealloc];
}

// Finishes setup that needs the nib outlets (_volume_view, _volume_slider,
// _playback): renders the volume panel background, positions it, and starts
// observing the playback environment.
- (void)awakeFromNib
{
	// Initially hide our volume view
	_volume_view.hidden = YES;
	UIImage *bgImage = nil;
	// Draw a transparent rounded rect into the image for the background
	{
		CGColorSpaceRef cs = NULL;
		CGContextRef cxt = NULL;
		CGImageRef img = NULL;
		CGPathRef roundRectPath = NULL;
		cs = CGColorSpaceCreateDeviceRGB();
		// Offscreen RGBA bitmap sized to the volume view (8 bits/channel,
		// 4 bytes/pixel rows).
		cxt = CGBitmapContextCreate(NULL, _volume_view.frame.size.width, _volume_view.frame.size.height, 8, 4 * _volume_view.frame.size.width, cs, kCGImageAlphaPremultipliedFirst);
		// Inset by 2 so the 1.5pt stroke isn't clipped at the bitmap edge.
		roundRectPath = CreateRoundedRectPath(CGRectInset(CGRectMake(0., 0., _volume_view.frame.size.width, _volume_view.frame.size.height), 2., 2.), 18.);
		// Translucent white fill and slightly stronger white stroke.
		CGFloat fillClr[] = {1., 1., 1., 0.1};
		CGFloat strokeClr[] = {1., 1., 1., 0.23};
		CGContextSetFillColorSpace(cxt, cs);
		CGContextSetStrokeColorSpace(cxt, cs);
		CGContextSetFillColor(cxt, fillClr);
		CGContextSetStrokeColor(cxt, strokeClr);
		CGContextAddPath(cxt, roundRectPath);
		CGContextSetLineWidth(cxt, 1.5);
		CGContextDrawPath(cxt, kCGPathFillStroke);
		img = CGBitmapContextCreateImage(cxt);
		bgImage = [UIImage imageWithCGImage:img];
		CGImageRelease(img);
		CGPathRelease(roundRectPath);
		CGContextRelease(cxt);
		CGColorSpaceRelease(cs);
	}
	// Set the background image of the volume view backing
	_volume_view.image = bgImage;
	// Offset our volume view to compensate for the special transform we're
	// putting on this view's layer (see the sublayerTransform set in
	// initializeContents, which centers (0,0) in the view).
	_volume_view.frame = CGRectOffset(_volume_view.frame, -([self frame].size.width / 2.), -([self frame].size.height / 2.));
	// Set the volume slider to the volume level for the source
	_volume_slider.value = _playback.sourceVolume;
	// We want to register as an observer for the oalPlayback environment, so
	// we'll get notified when things change, i.e. source position, listener
	// position. Removed again in dealloc.
	[_playback addObserver:self forKeyPath:@"sourcePos" options:NSKeyValueObservingOptionNew context:NULL];
	[_playback addObserver:self forKeyPath:@"isPlaying" options:NSKeyValueObservingOptionNew context:NULL];
	[_playback addObserver:self forKeyPath:@"listenerPos" options:NSKeyValueObservingOptionNew context:NULL];
	[_playback addObserver:self forKeyPath:@"listenerRotation" options:NSKeyValueObservingOptionNew context:NULL];
	[_playback addObserver:self forKeyPath:@"sourceVolume" options:NSKeyValueObservingOptionNew context:NULL];
	// Sync the layers with the environment's current state.
	[self layoutContents];
}

#pragma mark KVO

// Generally, we just call [self layoutContents] whenever something changes in
// the oalPlayback environment. When the sound source is turned on or off (or
// its volume changes), we also swap the speaker layer's image to show or hide
// the sound waves at the matching intensity.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
	if ( (object == _playback) && ([keyPath isEqualToString:@"sourcePos"]) ) {
		[self layoutContents];
	}
	else if ( (object == _playback) && (([keyPath isEqualToString:@"isPlaying"]) || ([keyPath isEqualToString:@"sourceVolume"])) ) {
		[self layoutContents];
		if (_playback.isPlaying) {
			// Map volume (assumed 0..1 — the slider range; TODO confirm)
			// onto the five "on" images via ceil(volume * 4).
			int speaker_volLvl = ceil(_playback.sourceVolume * 4.);
			CGImageRef spkrImg;
			switch(speaker_volLvl) {
				case(0): spkrImg = _speaker_on_0; break;
				case(1): spkrImg = _speaker_on_1; break;
				case(2): spkrImg = _speaker_on_2; break;
				case(3): spkrImg = _speaker_on_3; break;
				default: spkrImg = _speaker_on_4;
			}
			_speakerLayer.contents = (id)spkrImg;
		} else
			_speakerLayer.contents = (id)_speaker_off;
	}
	else if ( (object == _playback) && ([keyPath isEqualToString:@"listenerPos"]) ) {
		[self layoutContents];
	}
	else if ( (object == _playback) && ([keyPath isEqualToString:@"listenerRotation"]) ) {
		[self layoutContents];
	}
	else {
		// Programmer error: we only registered for the five key paths above.
		[NSException raise:@"Error" format:@"%@ observing unexpected keypath %@ for object %@", self, keyPath, object];
	}
}

#pragma mark View contents

// One-time construction of the layer tree: loads all images, creates the
// speaker/listener/instructions layers, and centers the layer coordinate
// system on the view.
- (void)initializeContents
{
	// Load images for the two speaker states and retain them, because we'll
	// be switching between them. CGImageRetain is required since the backing
	// UIImages are autoreleased; balanced by CGImageRelease in dealloc.
	_speaker_off = [[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"speaker_off" ofType:@"png"]] CGImage];
	CGImageRetain(_speaker_off);
	_speaker_on_0 = [[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"speaker_on_0" ofType:@"png"]] CGImage];
	_speaker_on_1 = [[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"speaker_on_1" ofType:@"png"]] CGImage];
	_speaker_on_2 = [[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"speaker_on_2" ofType:@"png"]] CGImage];
	_speaker_on_3 = [[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"speaker_on_3" ofType:@"png"]] CGImage];
	_speaker_on_4 = [[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"speaker_on_4" ofType:@"png"]] CGImage];
	CGImageRetain(_speaker_on_0);
	CGImageRetain(_speaker_on_1);
	CGImageRetain(_speaker_on_2);
	CGImageRetain(_speaker_on_3);
	CGImageRetain(_speaker_on_4);
	// These two are only used here, so no retain — the layers' contents
	// property holds onto them.
	CGImageRef listenerImg = [[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"listener" ofType:@"png"]] CGImage];
	CGImageRef instructionsImg = [[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"instructions" ofType:@"png"]] CGImage];
	// Set up the CALayer which shows the speaker
	_speakerLayer = [CALayer layer];
	_speakerLayer.frame = CGRectMake(0., 0., CGImageGetWidth(_speaker_off), CGImageGetHeight(_speaker_off));
	_speakerLayer.contents = (id)_speaker_off;
	// Set up the CALayer which shows the listener. The anchor point is nudged
	// below center so the listener image rotates about its head.
	_listenerLayer = [CALayer layer];
	_listenerLayer.frame = CGRectMake(0., 0., CGImageGetWidth(listenerImg), CGImageGetHeight(listenerImg));
	_listenerLayer.contents = (id)listenerImg;
	_listenerLayer.anchorPoint = CGPointMake(0.5, 0.57);
	// Set up the CALayer which shows the instructions
	_instructionsLayer = [CALayer layer];
	_instructionsLayer.frame = CGRectMake(0., 0., CGImageGetWidth(instructionsImg), CGImageGetHeight(instructionsImg));
	_instructionsLayer.position = CGPointMake(0., -140.);
	_instructionsLayer.contents = (id)instructionsImg;
	// Set a sublayerTransform on our view's layer. This causes (0,0) to be in
	// the center of the view. This transform is useful because now our view's
	// coordinates map precisely to our oalPlayback sound environment's
	// coordinates.
	CATransform3D trans = CATransform3DMakeTranslation([self frame].size.width / 2., [self frame].size.height / 2., 0.);
	self.layer.sublayerTransform = trans;
	// Set the background image for the sound stage
	CGImageRef bgImg = [[UIImage imageWithContentsOfFile:[[NSBundle mainBundle] pathForResource:@"stagebg" ofType:@"png"]] CGImage];
	self.layer.contents = (id)bgImg;
	// Add our sublayers
	[self.layer insertSublayer:_speakerLayer above:self.layer];
	[self.layer insertSublayer:_listenerLayer above:self.layer];
	[self.layer insertSublayer:_instructionsLayer above:self.layer];
	// Prevent things from drawing outside our layer bounds
	self.layer.masksToBounds = YES;
}

// layoutContents gets called via KVO whenever properties within our
// oalPlayback object change; it mirrors the environment's positions and
// rotations onto the layers.
- (void)layoutContents
{
	// Wrap these layer changes in a transaction and set the animation
	// duration to 0 so we don't get implicit animation.
	[CATransaction begin];
	[CATransaction setValue:[NSNumber numberWithDouble:0.] forKey:kCATransactionAnimationDuration];
	// Position and rotate the listener
	_listenerLayer.position = _playback.listenerPos;
	_listenerLayer.transform = CATransform3DMakeRotation(_playback.listenerRotation, 0., 0., 1.);
	// The speaker gets rotated so that it's always facing the listener
	CGFloat rot = atan2(-(_playback.sourcePos.x - _playback.listenerPos.x), _playback.sourcePos.y - _playback.listenerPos.y);
	// Rotate and position the speaker
	_speakerLayer.position = _playback.sourcePos;
	_speakerLayer.transform = CATransform3DMakeRotation(rot, 0., 0., 1.);
	[CATransaction commit];
}

#pragma mark Events

// Shared handler for began/moved touches, in stage (layer) coordinates.
// Dismisses the instructions on first touch and drags whichever object
// touchesBegan: latched onto (_draggingLayer).
- (void)touchPoint:(CGPoint)pt
{
	if (!(_instructionsLayer.hidden)) _instructionsLayer.hidden = YES;
	if (_draggingLayer == _speakerLayer) _playback.sourcePos = pt;
	else if (_draggingLayer == _listenerLayer) _playback.listenerPos = pt;
}

- (void)touchesBegan:(NSSet*)touches withEvent:(UIEvent*)event
{
	CGPoint pointInView = [[touches anyObject] locationInView:self];
	// Any new touch dismisses the volume panel (a double-tap on the speaker
	// re-shows it below).
	_volume_view.hidden = YES;
	// Clip our pointInView to within 5 pixels of any edge, so we can't
	// position objects near or beyond the edge of the sound stage.
	pointInView = CGPointWithinBounds(pointInView, CGRectInset([self bounds], 5., 5.));
	// Convert the view point to our layer / sound stage coordinate system,
	// which is centered at (0,0).
	CGPoint pointInLayer = CGPointMake(pointInView.x - [self frame].size.width / 2., pointInView.y - [self frame].size.height / 2.);
	// Find out if the distance between the touch is within the tolerance
	// threshhold for moving the source object or the listener object.
	// The source is checked first, so it wins when both are in range.
	if (hypot(_playback.sourcePos.x - pointInLayer.x, _playback.sourcePos.y - pointInLayer.y) < kTouchDistanceThreshhold) {
		if (((UITouch *)[touches anyObject]).tapCount >= 2) {
			// Double-tap on the speaker toggles the volume panel visible.
			_volume_view.hidden = NO;
		} else {
			_draggingLayer = _speakerLayer;
		}
	} else if (hypot(_playback.listenerPos.x - pointInLayer.x, _playback.listenerPos.y - pointInLayer.y) < kTouchDistanceThreshhold) {
		_draggingLayer = _listenerLayer;
	} else {
		_draggingLayer = nil;
	}
	// Handle the touch
	[self touchPoint:pointInLayer];
}

- (void)touchesMoved:(NSSet*)touches withEvent:(UIEvent*)event
{
	// Called repeatedly as the touch moves; same clip + convert as
	// touchesBegan:, then forward to the shared handler.
	CGPoint pointInView = [[touches anyObject] locationInView:self];
	pointInView = CGPointWithinBounds(pointInView, CGRectInset([self bounds], 5., 5.));
	CGPoint pointInLayer = CGPointMake(pointInView.x - [self frame].size.width / 2., pointInView.y - [self frame].size.height / 2.);
	[self touchPoint:pointInLayer];
}

- (void)touchesEnded:(NSSet*)touches withEvent:(UIEvent*)event
{
	// Drop the drag target; subsequent moves do nothing until a new began.
	_draggingLayer = nil;
}

@end
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -