/*************************************************************************/
/* gl_view.mm */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/

#import <QuartzCore/QuartzCore.h>
#import <OpenGLES/EAGLDrawable.h>

#include "os_iphone.h"
#include "core/os/keyboard.h"
#include "core/global_config.h"
#include "servers/audio_server.h"

#import "gl_view.h"

/*
@interface GLView (private)

- (id)initGLES;
- (BOOL)createFramebuffer;
- (void)destroyFramebuffer;
@end
*/

int gl_view_base_fb;
static String keyboard_text;
static GLView* _instance = NULL;

static bool video_found_error = false;
static bool video_playing = false;
static float video_previous_volume = 0.0f;
static CMTime video_current_time;

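// C-callable helpers below are invoked by the iOS OS layer (OSIPhone) to drive
// the native on-screen keyboard and AVPlayer-based video playback on this view.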
void _show_keyboard(String);
void _hide_keyboard();
bool _play_video(String, float, String, String);
bool _is_video_playing();
void _pause_video();
void _focus_out_video();
void _unpause_video();
void _stop_video();

void _show_keyboard(String p_existing) {

	keyboard_text = p_existing;
	printf("instance on show is %p\n", _instance);
	[_instance open_keyboard];
};

void _hide_keyboard() {

	printf("instance on hide is %p\n", _instance);
	[_instance hide_keyboard];
	keyboard_text = "";
};

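// Plays a video file fullscreen over the GL view using AVPlayer/AVPlayerLayer,
// selecting the requested audio and subtitle tracks by locale when available.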
bool _play_video(String p_path, float p_volume, String p_audio_track, String p_subtitle_track) {

	p_path = GlobalConfig::get_singleton()->globalize_path(p_path);

	NSString* file_path = [[[NSString alloc] initWithUTF8String:p_path.utf8().get_data()] autorelease];

	_instance.avAsset = [AVAsset assetWithURL:[NSURL fileURLWithPath:file_path]];

	_instance.avPlayerItem = [[AVPlayerItem alloc] initWithAsset:_instance.avAsset];
	[_instance.avPlayerItem addObserver:_instance forKeyPath:@"status" options:0 context:nil];

	_instance.avPlayer = [[AVPlayer alloc] initWithPlayerItem:_instance.avPlayerItem];
	_instance.avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:_instance.avPlayer];

	[_instance.avPlayer addObserver:_instance forKeyPath:@"status" options:0 context:nil];
	[[NSNotificationCenter defaultCenter] addObserver:_instance
			selector:@selector(playerItemDidReachEnd:)
			name:AVPlayerItemDidPlayToEndTimeNotification
			object:[_instance.avPlayer currentItem]];

	[_instance.avPlayer addObserver:_instance forKeyPath:@"rate" options:NSKeyValueObservingOptionNew context:0];

	[_instance.avPlayerLayer setFrame:_instance.bounds];
	[_instance.layer addSublayer:_instance.avPlayerLayer];
	[_instance.avPlayer play];

	AVMediaSelectionGroup *audioGroup = [_instance.avAsset mediaSelectionGroupForMediaCharacteristic: AVMediaCharacteristicAudible];

	NSMutableArray *allAudioParams = [NSMutableArray array];
	for (id track in audioGroup.options)
	{
		NSString* language = [[track locale] localeIdentifier];
		NSLog(@"audio lang: %@", language);

		if ([language isEqualToString:[NSString stringWithUTF8String:p_audio_track.utf8()]])
		{
			AVMutableAudioMixInputParameters *audioInputParams = [AVMutableAudioMixInputParameters audioMixInputParameters];
			[audioInputParams setVolume:p_volume atTime:kCMTimeZero];
			[audioInputParams setTrackID:[track trackID]];
			[allAudioParams addObject:audioInputParams];

			AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
			[audioMix setInputParameters:allAudioParams];

			[_instance.avPlayer.currentItem selectMediaOption:track inMediaSelectionGroup: audioGroup];
			[_instance.avPlayer.currentItem setAudioMix:audioMix];

			break;
		}
	}

	AVMediaSelectionGroup *subtitlesGroup = [_instance.avAsset mediaSelectionGroupForMediaCharacteristic: AVMediaCharacteristicLegible];
	NSArray *useableTracks = [AVMediaSelectionGroup mediaSelectionOptionsFromArray:subtitlesGroup.options withoutMediaCharacteristics:[NSArray arrayWithObject:AVMediaCharacteristicContainsOnlyForcedSubtitles]];

	for (id track in useableTracks)
	{
		NSString* language = [[track locale] localeIdentifier];
		NSLog(@"subtitle lang: %@", language);

		if ([language isEqualToString:[NSString stringWithUTF8String:p_subtitle_track.utf8()]])
		{
			[_instance.avPlayer.currentItem selectMediaOption:track inMediaSelectionGroup: subtitlesGroup];
			break;
		}
	}

	video_playing = true;

	return true;
}

bool _is_video_playing() {

	if (_instance.avPlayer.error) {
		printf("Error during playback\n");
	}
	return (_instance.avPlayer.rate > 0 && !_instance.avPlayer.error);
}

void _pause_video() {

	video_current_time = _instance.avPlayer.currentTime;
	[_instance.avPlayer pause];
	video_playing = false;
}

void _focus_out_video() {

	printf("focus out pausing video\n");
	[_instance.avPlayer pause];
};

void _unpause_video() {

	[_instance.avPlayer play];
	video_playing = true;
};

void _stop_video() {

	[_instance.avPlayer pause];
	[_instance.avPlayerLayer removeFromSuperlayer];
	_instance.avPlayer = nil;
	video_playing = false;
}

@implementation GLView

@synthesize animationInterval;

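// Active UITouch pointers are tracked in a fixed-size table so each touch can
// be reported to the engine with a stable integer index (up to max_touches).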
static const int max_touches = 8;
static UITouch* touches[max_touches];

static void init_touches() {

	for (int i=0; i<max_touches; i++) {
		touches[i] = NULL;
	};
};

static int get_touch_id(UITouch* p_touch) {

	int first = -1;
	for (int i=0; i<max_touches; i++) {
		if (first == -1 && touches[i] == NULL) {
			first = i;
			continue;
		};
		if (touches[i] == p_touch)
			return i;
	};

	if (first != -1) {
		touches[first] = p_touch;
		return first;
	};

	return -1;
};

static int remove_touch(UITouch* p_touch) {

	int remaining = 0;
	for (int i=0; i<max_touches; i++) {

		if (touches[i] == NULL)
			continue;
		if (touches[i] == p_touch)
			touches[i] = NULL;
		else
			++remaining;
	};
	return remaining;
};

static int get_first_id(UITouch* p_touch) {

	for (int i=0; i<max_touches; i++) {

		if (touches[i] != NULL)
			return i;
	};
	return -1;
};

static void clear_touches() {

	for (int i=0; i<max_touches; i++) {

		touches[i] = NULL;
	};
};

// Implement this to override the default layer class (which is [CALayer class]).
// We do this so that our view will be backed by a layer that is capable of OpenGL ES rendering.
+ (Class) layerClass
{
	return [CAEAGLLayer class];
}

// The GL view is stored in the nib file. When it's unarchived it's sent -initWithCoder:
- (id)initWithCoder:(NSCoder*)coder
{
	active = FALSE;
	if((self = [super initWithCoder:coder]))
	{
		self = [self initGLES];
	}
	return self;
}

-(id)initGLES
{
	// Get our backing layer
	CAEAGLLayer *eaglLayer = (CAEAGLLayer*) self.layer;

	// Configure it so that it is opaque, does not retain the contents of the backbuffer when displayed, and uses RGBA8888 color.
	eaglLayer.opaque = YES;
	eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
			[NSNumber numberWithBool:FALSE], kEAGLDrawablePropertyRetainedBacking,
			kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat,
			nil];

	// Create our EAGLContext, and if successful make it current and create our framebuffer.
	context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3];

	if(!context || ![EAGLContext setCurrentContext:context] || ![self createFramebuffer])
	{
		[self release];
		return nil;
	}

	// Default the animation interval to 1/60th of a second.
	animationInterval = 1.0 / 60.0;
	return self;
}

-(id<GLViewDelegate>)delegate
{
	return delegate;
}

// Update the delegate, and if it needs a -setupView: call, set our internal flag so that it will be called.
-(void)setDelegate:(id<GLViewDelegate>)d
{
	delegate = d;
	delegateSetup = ![delegate respondsToSelector:@selector(setupView:)];
}

@synthesize useCADisplayLink;

// If our view is resized, we'll be asked to layout subviews.
// This is the perfect opportunity to also update the framebuffer so that it is
// the same size as our display area.
-(void)layoutSubviews
{
	//printf("HERE\n");
	[EAGLContext setCurrentContext:context];
	[self destroyFramebuffer];
	[self createFramebuffer];
	[self drawView];
	[self drawView];
}

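// Creates the framebuffer and renderbuffers at the screen's native resolution
// (contentScaleFactor maps points to physical pixels) and publishes the
// resulting FBO to the engine through OSIPhone / gl_view_base_fb.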
- (BOOL)createFramebuffer
{
	// Generate IDs for a framebuffer object and a color renderbuffer
	UIScreen* mainscr = [UIScreen mainScreen];
	printf("******** screen size %i, %i\n", (int)mainscr.currentMode.size.width, (int)mainscr.currentMode.size.height);
	float minPointSize = MIN(mainscr.bounds.size.width, mainscr.bounds.size.height);
	float minScreenSize = MIN(mainscr.currentMode.size.width, mainscr.currentMode.size.height);
	self.contentScaleFactor = minScreenSize / minPointSize;

	glGenFramebuffersOES(1, &viewFramebuffer);
	glGenRenderbuffersOES(1, &viewRenderbuffer);

	glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);
	glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
	// This call associates the storage for the current render buffer with the EAGLDrawable (our CAEAGLLayer)
	// allowing us to draw into a buffer that will later be rendered to screen wherever the layer is (which corresponds with our view).
	[context renderbufferStorage:GL_RENDERBUFFER_OES fromDrawable:(id<EAGLDrawable>)self.layer];
	glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, viewRenderbuffer);

	glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_WIDTH_OES, &backingWidth);
	glGetRenderbufferParameterivOES(GL_RENDERBUFFER_OES, GL_RENDERBUFFER_HEIGHT_OES, &backingHeight);

	// For this sample, we also need a depth buffer, so we'll create and attach one via another renderbuffer.
	glGenRenderbuffersOES(1, &depthRenderbuffer);
	glBindRenderbufferOES(GL_RENDERBUFFER_OES, depthRenderbuffer);
	glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_DEPTH_COMPONENT16_OES, backingWidth, backingHeight);
	glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_DEPTH_ATTACHMENT_OES, GL_RENDERBUFFER_OES, depthRenderbuffer);

	if(glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES) != GL_FRAMEBUFFER_COMPLETE_OES)
	{
		NSLog(@"failed to make complete framebuffer object %x", glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES));
		return NO;
	}

	if (OS::get_singleton()) {
		OS::VideoMode vm;
		vm.fullscreen = true;
		vm.width = backingWidth;
		vm.height = backingHeight;
		vm.resizable = false;
		OS::get_singleton()->set_video_mode(vm);
		OSIPhone::get_singleton()->set_base_framebuffer(viewFramebuffer);
	};
	gl_view_base_fb = viewFramebuffer;

	return YES;
}

// Clean up any buffers we have allocated.
- (void)destroyFramebuffer
{
	glDeleteFramebuffersOES(1, &viewFramebuffer);
	viewFramebuffer = 0;
	glDeleteRenderbuffersOES(1, &viewRenderbuffer);
	viewRenderbuffer = 0;

	if(depthRenderbuffer)
	{
		glDeleteRenderbuffersOES(1, &depthRenderbuffer);
		depthRenderbuffer = 0;
	}
}

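// The render loop is driven either by a CADisplayLink synced to the screen
// refresh (when useCADisplayLink is set) or by a plain NSTimer firing at
// animationInterval.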
- (void)startAnimation
{
	if (active)
		return;
	active = TRUE;
	printf("start animation!\n");
	if (useCADisplayLink) {

		// Approximate frame rate
		// assumes device refreshes at 60 fps
		int frameInterval = (int) floor(animationInterval * 60.0f);

		displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(drawView)];
		[displayLink setFrameInterval:frameInterval];

		// Setup DisplayLink in main thread
		[displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
	}
	else {
		animationTimer = [NSTimer scheduledTimerWithTimeInterval:animationInterval target:self selector:@selector(drawView) userInfo:nil repeats:YES];
	}

	if (video_playing)
	{
		_unpause_video();
	}
}

- (void)stopAnimation
{
	if (!active)
		return;
	active = FALSE;
	printf("******** stop animation!\n");

	if (useCADisplayLink) {
		[displayLink invalidate];
		displayLink = nil;
	}
	else {
		[animationTimer invalidate];
		animationTimer = nil;
	}

	clear_touches();

	if (video_playing)
	{
		// save position
	}
}

- (void)setAnimationInterval:(NSTimeInterval)interval
{
	animationInterval = interval;
	if ( (useCADisplayLink && displayLink) || ( !useCADisplayLink && animationTimer ) ) {
		[self stopAnimation];
		[self startAnimation];
	}
}

// Updates the OpenGL view when the timer fires
- (void)drawView
{
	if (useCADisplayLink) {
		// Pause the CADisplayLink to avoid recursion
		[displayLink setPaused: YES];

		// Process all input events
		while(CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0, TRUE) == kCFRunLoopRunHandledSource);

		// We are good to go, resume the CADisplayLink
		[displayLink setPaused: NO];
	}

	if (!active) {
		printf("draw view not active!\n");
		return;
	};

	// Make sure that you are drawing to the current context
	[EAGLContext setCurrentContext:context];

	// If our drawing delegate needs to have the view setup, then call -setupView: and flag that it won't need to be called again.
	if(!delegateSetup)
	{
		[delegate setupView:self];
		delegateSetup = YES;
	}

	glBindFramebufferOES(GL_FRAMEBUFFER_OES, viewFramebuffer);

	[delegate drawView:self];

	glBindRenderbufferOES(GL_RENDERBUFFER_OES, viewRenderbuffer);
	[context presentRenderbuffer:GL_RENDERBUFFER_OES];

#ifdef DEBUG_ENABLED
	GLenum err = glGetError();
	if(err)
		NSLog(@"%x error", err);
#endif
}

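// Touch handlers forward every began/moved/ended/cancelled event to OSIPhone,
// with coordinates scaled by contentScaleFactor so they are in physical pixels.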
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
	NSArray* tlist = [[event allTouches] allObjects];
	for (unsigned int i=0; i< [tlist count]; i++) {

		if ( [touches containsObject:[tlist objectAtIndex:i]] ) {

			UITouch* touch = [tlist objectAtIndex:i];
			if (touch.phase != UITouchPhaseBegan)
				continue;
			int tid = get_touch_id(touch);
			ERR_FAIL_COND(tid == -1);
			CGPoint touchPoint = [touch locationInView:self];
			OSIPhone::get_singleton()->mouse_button(tid, touchPoint.x * self.contentScaleFactor, touchPoint.y * self.contentScaleFactor, true, touch.tapCount > 1, tid == 0);
		};
	};
}

- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
	NSArray* tlist = [[event allTouches] allObjects];
	for (unsigned int i=0; i< [tlist count]; i++) {

		if ( [touches containsObject:[tlist objectAtIndex:i]] ) {

			UITouch* touch = [tlist objectAtIndex:i];
			if (touch.phase != UITouchPhaseMoved)
				continue;
			int tid = get_touch_id(touch);
			ERR_FAIL_COND(tid == -1);
			int first = get_first_id(touch);
			CGPoint touchPoint = [touch locationInView:self];
			CGPoint prev_point = [touch previousLocationInView:self];
			OSIPhone::get_singleton()->mouse_move(tid, prev_point.x * self.contentScaleFactor, prev_point.y * self.contentScaleFactor, touchPoint.x * self.contentScaleFactor, touchPoint.y * self.contentScaleFactor, first == tid);
		};
	};
}

- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
	NSArray* tlist = [[event allTouches] allObjects];
	for (unsigned int i=0; i< [tlist count]; i++) {

		if ( [touches containsObject:[tlist objectAtIndex:i]] ) {

			UITouch* touch = [tlist objectAtIndex:i];
			if (touch.phase != UITouchPhaseEnded)
				continue;
			int tid = get_touch_id(touch);
			ERR_FAIL_COND(tid == -1);
			int rem = remove_touch(touch);
			CGPoint touchPoint = [touch locationInView:self];
			OSIPhone::get_singleton()->mouse_button(tid, touchPoint.x * self.contentScaleFactor, touchPoint.y * self.contentScaleFactor, false, false, rem == 0);
		};
	};
}

- (void)touchesCancelled:(NSSet *)touches withEvent:(UIEvent *)event {

	OSIPhone::get_singleton()->touches_cancelled();
	clear_touches();
};

- (BOOL)canBecomeFirstResponder {
	return YES;
};

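// The on-screen keyboard is shown/hidden by making the view become or resign
// first responder; typed text then arrives through the UIKeyInput methods below.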
- (void)open_keyboard {
	//keyboard_text = p_existing;
	[self becomeFirstResponder];
};

- (void)hide_keyboard {
	//keyboard_text = p_existing;
	[self resignFirstResponder];
};

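// UIKeyInput protocol: feeds typed characters and backspace to the engine as
// key events, mirroring them in keyboard_text so -hasText can answer correctly.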
- (void)deleteBackward {
	if (keyboard_text.length())
		keyboard_text.erase(keyboard_text.length() - 1, 1);
	OSIPhone::get_singleton()->key(KEY_BACKSPACE, true);
};

- (BOOL)hasText {
	return keyboard_text.length() ? YES : NO;
};

- (void)insertText:(NSString *)p_text {
	String character;
	character.parse_utf8([p_text UTF8String]);
	keyboard_text = keyboard_text + character;
	OSIPhone::get_singleton()->key(character[0] == 10 ? KEY_ENTER : character[0], true);
	printf("inserting text with character %i\n", character[0]);
};

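// Audio route changes (e.g. headphones unplugged) make iOS pause AVPlayer
// automatically; resume video playback shortly afterwards so it keeps running.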
- (void)audioRouteChangeListenerCallback:(NSNotification*)notification
{
	printf("*********** route changed!\n");
	NSDictionary *interuptionDict = notification.userInfo;

	NSInteger routeChangeReason = [[interuptionDict valueForKey:AVAudioSessionRouteChangeReasonKey] integerValue];

	switch (routeChangeReason) {

		case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
			NSLog(@"AVAudioSessionRouteChangeReasonNewDeviceAvailable");
			NSLog(@"Headphone/Line plugged in");
			break;

		case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
			NSLog(@"AVAudioSessionRouteChangeReasonOldDeviceUnavailable");
			NSLog(@"Headphone/Line was pulled. Resuming video play....");
			if (_is_video_playing()) {

				dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0.5f * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
					[_instance.avPlayer play]; // NOTE: change this line according to your current player implementation
					NSLog(@"resumed play");
				});
			};
			break;

		case AVAudioSessionRouteChangeReasonCategoryChange:
			// called at start - also when other audio wants to play
			NSLog(@"AVAudioSessionRouteChangeReasonCategoryChange");
			break;
	}
}

// When created via code however, we get initWithFrame
-(id)initWithFrame:(CGRect)frame
{
	self = [super initWithFrame:frame];
	_instance = self;
	printf("after init super %p\n", self);
	if(self != nil)
	{
		self = [self initGLES];
		printf("after init gles %p\n", self);
	}
	init_touches();
	self.multipleTouchEnabled = YES;

	printf("******** adding observer for sound routing changes\n");
	[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(audioRouteChangeListenerCallback:)
			name:AVAudioSessionRouteChangeNotification
			object:nil];

	//self.autoresizesSubviews = YES;
	//[self setAutoresizingMask:UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleWidth];

	return self;
}

// -(BOOL)automaticallyForwardAppearanceAndRotationMethodsToChildViewControllers {
//	return YES;
// }

// - (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation{
//	return YES;
// }

// Stop animating and release resources when they are no longer needed.
- (void)dealloc
{
	[self stopAnimation];

	if([EAGLContext currentContext] == context)
	{
		[EAGLContext setCurrentContext:nil];
	}

	[context release];
	context = nil;

	[super dealloc];
}

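// KVO callback for the AVPlayer/AVPlayerItem "status" and "rate" keys: stops
// playback on failure, restores the saved position once the item is ready to
// play, and nudges a stalled (rate == 0) player back into playing.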
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object
		change:(NSDictionary *)change context:(void *)context {

	if (object == _instance.avPlayerItem && [keyPath isEqualToString:@"status"]) {
		if (_instance.avPlayerItem.status == AVPlayerStatusFailed || _instance.avPlayer.status == AVPlayerStatusFailed) {
			_stop_video();
			video_found_error = true;
		}

		if(_instance.avPlayer.status == AVPlayerStatusReadyToPlay &&
				_instance.avPlayerItem.status == AVPlayerItemStatusReadyToPlay &&
				CMTIME_COMPARE_INLINE(video_current_time, ==, kCMTimeZero)) {

			//NSLog(@"time: %@", video_current_time);

			[_instance.avPlayer seekToTime:video_current_time];
			video_current_time = kCMTimeZero;
		}
	}

	if (object == _instance.avPlayer && [keyPath isEqualToString:@"rate"]) {
		NSLog(@"Player playback rate changed: %.5f", _instance.avPlayer.rate);
		if (_is_video_playing() && _instance.avPlayer.rate == 0.0 && !_instance.avPlayer.error) {
			dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0.5f * NSEC_PER_SEC), dispatch_get_main_queue(), ^{
				[_instance.avPlayer play]; // NOTE: change this line according to your current player implementation
				NSLog(@"resumed play");
			});

			NSLog(@" . . . PAUSED (or just started)");
		}
	}
}

- (void)playerItemDidReachEnd:(NSNotification *)notification {
	_stop_video();
}

/*
- (void)moviePlayBackDidFinish:(NSNotification*)notification {

	NSNumber* reason = [[notification userInfo] objectForKey:MPMoviePlayerPlaybackDidFinishReasonUserInfoKey];
	switch ([reason intValue]) {
		case MPMovieFinishReasonPlaybackEnded:
			//NSLog(@"Playback Ended");
			break;
		case MPMovieFinishReasonPlaybackError:
			//NSLog(@"Playback Error");
			video_found_error = true;
			break;
		case MPMovieFinishReasonUserExited:
			//NSLog(@"User Exited");
			video_found_error = true;
			break;
		default:
			//NSLog(@"Unsupported reason!");
			break;
	}

	MPMoviePlayerController *player = [notification object];

	[[NSNotificationCenter defaultCenter]
			removeObserver:self
			name:MPMoviePlayerPlaybackDidFinishNotification
			object:player];

	[_instance.moviePlayerController stop];
	[_instance.moviePlayerController.view removeFromSuperview];

	//[[MPMusicPlayerController applicationMusicPlayer] setVolume: video_previous_volume];
	video_playing = false;
}
*/

@end