/*************************************************************************/
/*  camera_ios.mm                                                        */
/*************************************************************************/
/*                       This file is part of:                           */
/*                           GODOT ENGINE                                */
/*                      https://godotengine.org                          */
/*************************************************************************/
/* Copyright (c) 2007-2021 Juan Linietsky, Ariel Manzur.                 */
/* Copyright (c) 2014-2021 Godot Engine contributors (cf. AUTHORS.md).   */
/*                                                                       */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the       */
/* "Software"), to deal in the Software without restriction, including   */
/* without limitation the rights to use, copy, modify, merge, publish,   */
/* distribute, sublicense, and/or sell copies of the Software, and to    */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions:                                             */
/*                                                                       */
/* The above copyright notice and this permission notice shall be        */
/* included in all copies or substantial portions of the Software.       */
/*                                                                       */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,       */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF    */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY  */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,  */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE     */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                */
/*************************************************************************/

///@TODO this is a near duplicate of CameraOSX, we should find a way to combine those to minimize code duplication!!!!
// If you fix something here, make sure you fix it there as well!

#include "camera_ios.h"
#include "servers/camera/camera_feed.h"

#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>

//////////////////////////////////////////////////////////////////////////
// MyCaptureSession - This is a little helper class so we can capture our frames

@interface MyCaptureSession : AVCaptureSession <AVCaptureVideoDataOutputSampleBufferDelegate> {
	Ref<CameraFeed> feed;
	size_t width[2];
	size_t height[2];
	PoolVector<uint8_t> img_data[2];

	AVCaptureDeviceInput *input;
	AVCaptureVideoDataOutput *output;
}

@end

@implementation MyCaptureSession

- (id)initForFeed:(Ref<CameraFeed>)p_feed andDevice:(AVCaptureDevice *)p_device {
	if (self = [super init]) {
		NSError *error;
		feed = p_feed;
		width[0] = 0;
		height[0] = 0;
		width[1] = 0;
		height[1] = 0;

		// prepare our device
		[p_device lockForConfiguration:&error];

		[p_device setFocusMode:AVCaptureFocusModeLocked];
		[p_device setExposureMode:AVCaptureExposureModeLocked];
		[p_device setWhiteBalanceMode:AVCaptureWhiteBalanceModeLocked];

		[p_device unlockForConfiguration];

		[self beginConfiguration];

		// setup our capture
		self.sessionPreset = AVCaptureSessionPreset1280x720;
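		// Note: the 720p preset keeps the per-frame copies below reasonably small;
		// AVCaptureSession also defines other presets (e.g. AVCaptureSessionPresetHigh)
		// if a different resolution were ever wanted here.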
|
|
|
|
|
|
|
|
input = [AVCaptureDeviceInput deviceInputWithDevice:p_device error:&error];
|
|
|
|
if (!input) {
|
|
|
|
print_line("Couldn't get input device for camera");
|
|
|
|
} else {
|
|
|
|
[self addInput:input];
|
|
|
|
}
|
|
|
|
|
|
|
|
output = [AVCaptureVideoDataOutput new];
|
|
|
|
if (!output) {
|
|
|
|
print_line("Couldn't get output device for camera");
|
|
|
|
} else {
|
|
|
|
NSDictionary *settings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
|
|
|
|
output.videoSettings = settings;
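			// With kCVPixelFormatType_420YpCbCr8BiPlanarFullRange the buffer arrives as two
			// planes: plane 0 is 8-bit luma (Y) at full resolution, plane 1 is interleaved
			// 8-bit Cb/Cr pairs at half resolution. Those two planes become the FORMAT_R8
			// and FORMAT_RG8 images handed to the feed further down.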

			// discard if the data output queue is blocked (as we process the still image)
			[output setAlwaysDiscardsLateVideoFrames:YES];

			// now set ourselves as the delegate to receive new frames. Note that we're
			// doing this on the main thread at the moment; we may need to change this..
			[output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

			[self addOutput:output];
		}

		[self commitConfiguration];

		// kick off our session..
		[self startRunning];
	};
	return self;
}

- (void)cleanup {
	// stop running
	[self stopRunning];

	// cleanup
	[self beginConfiguration];

	if (input) {
		[self removeInput:input];
		// don't release this
		input = nil;
	}

	if (output) {
		[self removeOutput:output];
		[output setSampleBufferDelegate:nil queue:NULL];
		output = nil;
	}

	[self commitConfiguration];
}

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
	// This gets called every time our camera has a new image for us to process.
	// May need to investigate a way to throttle this if we get more images than we're rendering frames..

	// For now, version 1, we're just doing the bare minimum to make this work...

	CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
	// int width = CVPixelBufferGetWidth(pixelBuffer);
	// int height = CVPixelBufferGetHeight(pixelBuffer);

	// The documentation says we need to lock this even though it's not in the samples;
	// we need to lock our base address so we can access our pixel buffers, better safe than sorry.
	CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

	// get our buffers
	unsigned char *dataY = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
	unsigned char *dataCbCr = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
	if (dataY == NULL) {
		print_line("Couldn't access Y pixel buffer data");
	} else if (dataCbCr == NULL) {
		print_line("Couldn't access CbCr pixel buffer data");
	} else {
		UIInterfaceOrientation orientation = UIInterfaceOrientationUnknown;
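		// UIApplication.statusBarOrientation is deprecated as of iOS 13, so on newer
		// systems we read the interface orientation from the app delegate window's
		// UIWindowScene instead.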

		if (@available(iOS 13, *)) {
			orientation = [UIApplication sharedApplication].delegate.window.windowScene.interfaceOrientation;
		} else {
			orientation = [[UIApplication sharedApplication] statusBarOrientation];
		}

		Ref<Image> img[2];

		{
			// do Y
			size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
			size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);

			if ((width[0] != new_width) || (height[0] != new_height)) {
				width[0] = new_width;
				height[0] = new_height;
				img_data[0].resize(new_width * new_height);
			}

			PoolVector<uint8_t>::Write w = img_data[0].write();
			memcpy(w.ptr(), dataY, new_width * new_height);

			img[0].instance();
			img[0]->create(new_width, new_height, 0, Image::FORMAT_R8, img_data[0]);
		}

		{
			// do CbCr
			size_t new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
			size_t new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
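			// The CbCr plane is half the luma resolution and stores Cb and Cr interleaved
			// (2 bytes per sample), which is why the buffer below is sized
			// 2 * new_width * new_height.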

			if ((width[1] != new_width) || (height[1] != new_height)) {
				width[1] = new_width;
				height[1] = new_height;
				img_data[1].resize(2 * new_width * new_height);
			}

			PoolVector<uint8_t>::Write w = img_data[1].write();
			memcpy(w.ptr(), dataCbCr, 2 * new_width * new_height);

			///TODO GLES2 doesn't support FORMAT_RG8, need to do some form of conversion
			img[1].instance();
			img[1]->create(new_width, new_height, 0, Image::FORMAT_RG8, img_data[1]);
		}

		// set our texture...
		feed->set_YCbCr_imgs(img[0], img[1]);

		// Update our matrix to match the orientation. Note: before changing anything
		// here, be aware that the project orientation settings must match your Xcode
		// settings or this will go wrong!
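		// Transform2D(xx, xy, yx, yy, ox, oy): the first four values are the two basis
		// columns and the last two the origin. The entries below are meant to rotate/flip
		// the landscape sensor image so the displayed feed matches the current interface
		// orientation.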
		Transform2D display_transform;
		switch (orientation) {
			case UIInterfaceOrientationPortrait: {
				display_transform = Transform2D(0.0, -1.0, -1.0, 0.0, 1.0, 1.0);
			} break;
			case UIInterfaceOrientationLandscapeRight: {
				display_transform = Transform2D(1.0, 0.0, 0.0, -1.0, 0.0, 1.0);
			} break;
			case UIInterfaceOrientationLandscapeLeft: {
				display_transform = Transform2D(-1.0, 0.0, 0.0, 1.0, 1.0, 0.0);
			} break;
			default: {
				display_transform = Transform2D(0.0, 1.0, 1.0, 0.0, 0.0, 0.0);
			} break;
		}

		//TODO: this is correct for the camera on the back; I have a feeling this needs to be inverted for the camera on the front!
		feed->set_transform(display_transform);
	}

	// and unlock
	CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
}

@end

//////////////////////////////////////////////////////////////////////////
// CameraFeedIOS - Subclass for camera feeds in iOS

class CameraFeedIOS : public CameraFeed {
private:
	AVCaptureDevice *device;
	MyCaptureSession *capture_session;

public:
	bool get_is_arkit() const;
	AVCaptureDevice *get_device() const;

	CameraFeedIOS();
	~CameraFeedIOS();

	void set_device(AVCaptureDevice *p_device);

	bool activate_feed();
	void deactivate_feed();
};

AVCaptureDevice *CameraFeedIOS::get_device() const {
	return device;
};

CameraFeedIOS::CameraFeedIOS() {
	capture_session = NULL;
	device = NULL;
	transform = Transform2D(1.0, 0.0, 0.0, 1.0, 0.0, 0.0); /* should re-orientate this based on device orientation */
};

void CameraFeedIOS::set_device(AVCaptureDevice *p_device) {
	device = p_device;

	// get some info
	NSString *device_name = p_device.localizedName;
	name = device_name.UTF8String;
	position = CameraFeed::FEED_UNSPECIFIED;
	if ([p_device position] == AVCaptureDevicePositionBack) {
		position = CameraFeed::FEED_BACK;
	} else if ([p_device position] == AVCaptureDevicePositionFront) {
		position = CameraFeed::FEED_FRONT;
	};
};

CameraFeedIOS::~CameraFeedIOS() {
	if (capture_session) {
		capture_session = nil;
	}

	if (device) {
		device = nil;
	}
};

bool CameraFeedIOS::activate_feed() {
	if (capture_session) {
		// already recording!
	} else {
		// start camera capture
		capture_session = [[MyCaptureSession alloc] initForFeed:this andDevice:device];
	};

	return true;
};

void CameraFeedIOS::deactivate_feed() {
	// end camera capture if we have one
	if (capture_session) {
		[capture_session cleanup];
		capture_session = nil;
	}
};

//////////////////////////////////////////////////////////////////////////
// MyDeviceNotifications - This is a little helper class that gets notifications
// when devices are connected/disconnected

@interface MyDeviceNotifications : NSObject {
	CameraIOS *camera_server;
}

@end

@implementation MyDeviceNotifications

- (void)devices_changed:(NSNotification *)notification {
	camera_server->update_feeds();
}

- (id)initForServer:(CameraIOS *)p_server {
	if (self = [super init]) {
		camera_server = p_server;

		[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(devices_changed:) name:AVCaptureDeviceWasConnectedNotification object:nil];
		[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(devices_changed:) name:AVCaptureDeviceWasDisconnectedNotification object:nil];
	};
	return self;
}

- (void)dealloc {
	// remove notifications
	[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceWasConnectedNotification object:nil];
	[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceWasDisconnectedNotification object:nil];
}

@end

MyDeviceNotifications *device_notifications = nil;

//////////////////////////////////////////////////////////////////////////
// CameraIOS - Subclass for our camera server on iPhone

void CameraIOS::update_feeds() {
	// Build up the list of camera device types we are interested in, then query the
	// available devices through an AVCaptureDeviceDiscoverySession.

	NSMutableArray *deviceTypes = [NSMutableArray array];

	[deviceTypes addObject:AVCaptureDeviceTypeBuiltInWideAngleCamera];
	[deviceTypes addObject:AVCaptureDeviceTypeBuiltInTelephotoCamera];

	if (@available(iOS 10.2, *)) {
		[deviceTypes addObject:AVCaptureDeviceTypeBuiltInDualCamera];
	}

	if (@available(iOS 11.1, *)) {
		[deviceTypes addObject:AVCaptureDeviceTypeBuiltInTrueDepthCamera];
	}
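	// The discovery session below only reports devices matching the types listed above;
	// the dual and TrueDepth cameras are added conditionally because their device type
	// constants require iOS 10.2 and iOS 11.1 respectively.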

	AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession
			discoverySessionWithDeviceTypes:deviceTypes
								  mediaType:AVMediaTypeVideo
								   position:AVCaptureDevicePositionUnspecified];

	// remove devices that are gone..
	for (int i = feeds.size() - 1; i >= 0; i--) {
		Ref<CameraFeedIOS> feed(feeds[i]);

		if (feed.is_null()) {
			// feed not managed by us
		} else if (![session.devices containsObject:feed->get_device()]) {
			// remove it from our array, this will also destroy it ;)
			remove_feed(feed);
		};
	};

	// add new devices..
	for (AVCaptureDevice *device in session.devices) {
		bool found = false;

		for (int i = 0; i < feeds.size() && !found; i++) {
			Ref<CameraFeedIOS> feed(feeds[i]);

			if (feed.is_null()) {
				// feed not managed by us
			} else if (feed->get_device() == device) {
				found = true;
			};
		};

		if (!found) {
			Ref<CameraFeedIOS> newfeed;
			newfeed.instance();
			newfeed->set_device(device);
			add_feed(newfeed);
		};
	};
};

CameraIOS::CameraIOS() {
	// check if we have our usage description
	NSString *usage_desc = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"NSCameraUsageDescription"];
	if (usage_desc == NULL) {
		// don't initialise if we don't get anything
		print_line("No NSCameraUsageDescription key in Info.plist, no access to cameras.");
		return;
	} else if (usage_desc.length == 0) {
		// don't initialise if it's empty
		print_line("Empty NSCameraUsageDescription key in Info.plist, no access to cameras.");
		return;
	}
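	// For reference, the application's Info.plist needs an entry along these lines
	// (the description string here is just an illustrative placeholder):
	//
	//   <key>NSCameraUsageDescription</key>
	//   <string>The camera is used for camera feeds and AR.</string>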

	// Now we'll request access. The first time this runs, the user will be prompted
	// with the usage string (iOS will display it). Once a decision is made it is
	// returned to us; if the user wants to change it later on they need to go into
	// Settings.
	print_line("Requesting Camera permissions");

	[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
			completionHandler:^(BOOL granted) {
				if (granted) {
					print_line("Access to cameras granted!");

					// Find available cameras we have at this time
					update_feeds();

					// should only have one of these....
					device_notifications = [[MyDeviceNotifications alloc] initForServer:this];
				} else {
					print_line("No access to cameras!");
				}
			}];
};

CameraIOS::~CameraIOS() {
	device_notifications = nil;
};