Merge pull request #53666 from pkowal1982/camera

Add Linux camera support
Rémi Verschelde 2024-09-25 12:39:03 +02:00
commit 26340e0b91
GPG Key ID: C3336907360768E1
16 changed files with 1108 additions and 10 deletions

doc/classes/CameraFeed.xml

@@ -34,6 +34,17 @@
Returns the position of camera on the device.
</description>
</method>
<method name="set_format">
<return type="bool" />
<param index="0" name="index" type="int" />
<param index="1" name="parameters" type="Dictionary" />
<description>
Sets the feed format parameters for the given index in the [member formats] array. Returns [code]true[/code] on success. By default, a YUYV-encoded stream is transformed to FEED_RGB. The output format of a YUYV-encoded stream can be changed with the [param parameters].output value:
[code]separate[/code] will result in FEED_YCBCR_SEP
[code]grayscale[/code] will result in desaturated FEED_RGB
[code]copy[/code] will result in FEED_YCBCR
</description>
</method>
</methods>
<members>
<member name="feed_is_active" type="bool" setter="set_active" getter="is_active" default="false">
@@ -42,7 +53,22 @@
<member name="feed_transform" type="Transform2D" setter="set_transform" getter="get_transform" default="Transform2D(1, 0, 0, -1, 0, 1)">
The transform applied to the camera's image.
</member>
<member name="formats" type="Array" setter="" getter="get_formats" default="[]">
Formats supported by the feed. Each entry is a [Dictionary] describing format parameters.
</member>
</members>
<signals>
<signal name="format_changed">
<description>
Emitted when the format has changed.
</description>
</signal>
<signal name="frame_changed">
<description>
Emitted when a new frame is available.
</description>
</signal>
</signals>
<constants>
<constant name="FEED_NOIMAGE" value="0" enum="FeedDataType">
No image set for the feed.
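A rough C++ usage sketch of the set_format/get_formats API documented above (not taken from this changeset; it assumes the server has already detected at least one feed and that format index 0 exists, and the helper name is made up for illustration):

#include "servers/camera/camera_feed.h"
#include "servers/camera_server.h"

// Hypothetical helper: request a separate Y/CbCr output from the first feed.
void select_separate_ycbcr_output() {
	CameraServer *server = CameraServer::get_singleton();
	if (server == nullptr || server->get_feed_count() == 0) {
		return; // No camera server or no camera detected.
	}
	Ref<CameraFeed> feed = server->get_feed(0);
	Array formats = feed->get_formats(); // One Dictionary per supported format (width, height, format, frame rate).
	if (formats.is_empty()) {
		return;
	}
	Dictionary parameters;
	parameters["output"] = "separate"; // YUYV-encoded streams are then exposed as FEED_YCBCR_SEP.
	feed->set_format(0, parameters);
	feed->set_active(true);
}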

doc/classes/CameraServer.xml

@@ -6,7 +6,7 @@
<description>
The [CameraServer] keeps track of different cameras accessible in Godot. These are external cameras such as webcams or the cameras on your phone.
It is notably used to provide AR modules with a video feed from the camera.
-[b]Note:[/b] This class is currently only implemented on macOS and iOS. To get a [CameraFeed] on iOS, the camera plugin from [url=https://github.com/godotengine/godot-ios-plugins]godot-ios-plugins[/url] is required. On other platforms, no [CameraFeed]s will be available.
+[b]Note:[/b] This class is currently only implemented on Linux, macOS, and iOS. On other platforms, no [CameraFeed]s will be available. To get a [CameraFeed] on iOS, the camera plugin from [url=https://github.com/godotengine/godot-ios-plugins]godot-ios-plugins[/url] is required.
</description>
<tutorials>
</tutorials>

modules/camera/SCsub

@@ -5,10 +5,16 @@ Import("env_modules")
env_camera = env_modules.Clone()
-if env["platform"] == "windows":
+if env["platform"] in ["windows", "macos", "linuxbsd"]:
env_camera.add_source_files(env.modules_sources, "register_types.cpp")
if env["platform"] == "windows":
env_camera.add_source_files(env.modules_sources, "camera_win.cpp")
elif env["platform"] == "macos":
-env_camera.add_source_files(env.modules_sources, "register_types.cpp")
env_camera.add_source_files(env.modules_sources, "camera_macos.mm")
elif env["platform"] == "linuxbsd":
env_camera.add_source_files(env.modules_sources, "camera_linux.cpp")
env_camera.add_source_files(env.modules_sources, "camera_feed_linux.cpp")
env_camera.add_source_files(env.modules_sources, "buffer_decoder.cpp")

modules/camera/buffer_decoder.cpp

@@ -0,0 +1,212 @@
/**************************************************************************/
/* buffer_decoder.cpp */
/**************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
#include "buffer_decoder.h"
#include "servers/camera/camera_feed.h"
#include <linux/videodev2.h>
BufferDecoder::BufferDecoder(CameraFeed *p_camera_feed) {
camera_feed = p_camera_feed;
width = camera_feed->get_format().width;
height = camera_feed->get_format().height;
image.instantiate();
}
AbstractYuyvBufferDecoder::AbstractYuyvBufferDecoder(CameraFeed *p_camera_feed) :
BufferDecoder(p_camera_feed) {
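// component_indexes stores the byte offsets of { Y0, Y1, Cb, Cr } within each
// 4-byte packed macropixel, so the YUYV-family decoders below can handle every
// packed 4:2:2 ordering (YUYV, YYUV, YVYU, UYVY, VYUY) with the same loop.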
switch (camera_feed->get_format().pixel_format) {
case V4L2_PIX_FMT_YYUV:
component_indexes = new int[4]{ 0, 1, 2, 3 };
break;
case V4L2_PIX_FMT_YVYU:
component_indexes = new int[4]{ 0, 2, 3, 1 };
break;
case V4L2_PIX_FMT_UYVY:
component_indexes = new int[4]{ 1, 3, 0, 2 };
break;
case V4L2_PIX_FMT_VYUY:
component_indexes = new int[4]{ 1, 3, 2, 0 };
break;
default:
component_indexes = new int[4]{ 0, 2, 1, 3 };
}
}
AbstractYuyvBufferDecoder::~AbstractYuyvBufferDecoder() {
delete[] component_indexes;
}
SeparateYuyvBufferDecoder::SeparateYuyvBufferDecoder(CameraFeed *p_camera_feed) :
AbstractYuyvBufferDecoder(p_camera_feed) {
y_image_data.resize(width * height);
cbcr_image_data.resize(width * height);
y_image.instantiate();
cbcr_image.instantiate();
}
void SeparateYuyvBufferDecoder::decode(StreamingBuffer p_buffer) {
uint8_t *y_dst = (uint8_t *)y_image_data.ptrw();
uint8_t *uv_dst = (uint8_t *)cbcr_image_data.ptrw();
uint8_t *src = (uint8_t *)p_buffer.start;
uint8_t *y0_src = src + component_indexes[0];
uint8_t *y1_src = src + component_indexes[1];
uint8_t *u_src = src + component_indexes[2];
uint8_t *v_src = src + component_indexes[3];
for (int i = 0; i < width * height; i += 2) {
*y_dst++ = *y0_src;
*y_dst++ = *y1_src;
*uv_dst++ = *u_src;
*uv_dst++ = *v_src;
y0_src += 4;
y1_src += 4;
u_src += 4;
v_src += 4;
}
if (y_image.is_valid()) {
y_image->set_data(width, height, false, Image::FORMAT_L8, y_image_data);
} else {
y_image.instantiate(width, height, false, Image::FORMAT_RGB8, y_image_data);
}
if (cbcr_image.is_valid()) {
cbcr_image->set_data(width, height, false, Image::FORMAT_L8, cbcr_image_data);
} else {
cbcr_image.instantiate(width, height, false, Image::FORMAT_RGB8, cbcr_image_data);
}
camera_feed->set_YCbCr_imgs(y_image, cbcr_image);
}
YuyvToGrayscaleBufferDecoder::YuyvToGrayscaleBufferDecoder(CameraFeed *p_camera_feed) :
AbstractYuyvBufferDecoder(p_camera_feed) {
image_data.resize(width * height);
}
void YuyvToGrayscaleBufferDecoder::decode(StreamingBuffer p_buffer) {
uint8_t *dst = (uint8_t *)image_data.ptrw();
uint8_t *src = (uint8_t *)p_buffer.start;
uint8_t *y0_src = src + component_indexes[0];
uint8_t *y1_src = src + component_indexes[1];
for (int i = 0; i < width * height; i += 2) {
*dst++ = *y0_src;
*dst++ = *y1_src;
y0_src += 4;
y1_src += 4;
}
if (image.is_valid()) {
image->set_data(width, height, false, Image::FORMAT_L8, image_data);
} else {
image.instantiate(width, height, false, Image::FORMAT_RGB8, image_data);
}
camera_feed->set_RGB_img(image);
}
YuyvToRgbBufferDecoder::YuyvToRgbBufferDecoder(CameraFeed *p_camera_feed) :
AbstractYuyvBufferDecoder(p_camera_feed) {
image_data.resize(width * height * 3);
}
void YuyvToRgbBufferDecoder::decode(StreamingBuffer p_buffer) {
uint8_t *src = (uint8_t *)p_buffer.start;
uint8_t *y0_src = src + component_indexes[0];
uint8_t *y1_src = src + component_indexes[1];
uint8_t *u_src = src + component_indexes[2];
uint8_t *v_src = src + component_indexes[3];
uint8_t *dst = (uint8_t *)image_data.ptrw();
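// Shift-only integer approximation of a YCbCr to RGB conversion; the
// expressions below evaluate to roughly:
//   R = Y + 1.5     * (V - 128)
//   G = Y - 0.375   * (U - 128) - 0.75 * (V - 128)
//   B = Y + 2.01563 * (U - 128)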
for (int i = 0; i < width * height; i += 2) {
int u = *u_src;
int v = *v_src;
int u1 = (((u - 128) << 7) + (u - 128)) >> 6;
int rg = (((u - 128) << 1) + (u - 128) + ((v - 128) << 2) + ((v - 128) << 1)) >> 3;
int v1 = (((v - 128) << 1) + (v - 128)) >> 1;
*dst++ = CLAMP(*y0_src + v1, 0, 255);
*dst++ = CLAMP(*y0_src - rg, 0, 255);
*dst++ = CLAMP(*y0_src + u1, 0, 255);
*dst++ = CLAMP(*y1_src + v1, 0, 255);
*dst++ = CLAMP(*y1_src - rg, 0, 255);
*dst++ = CLAMP(*y1_src + u1, 0, 255);
y0_src += 4;
y1_src += 4;
u_src += 4;
v_src += 4;
}
if (image.is_valid()) {
image->set_data(width, height, false, Image::FORMAT_RGB8, image_data);
} else {
image.instantiate(width, height, false, Image::FORMAT_RGB8, image_data);
}
camera_feed->set_RGB_img(image);
}
CopyBufferDecoder::CopyBufferDecoder(CameraFeed *p_camera_feed, bool p_rgba) :
BufferDecoder(p_camera_feed) {
rgba = p_rgba;
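// Raw copy path: 4 bytes per pixel when exposing the data as RGBA8, otherwise
// the packed 2-bytes-per-pixel stream is stored unconverted as an LA8 image.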
image_data.resize(width * height * (rgba ? 4 : 2));
}
void CopyBufferDecoder::decode(StreamingBuffer p_buffer) {
uint8_t *dst = (uint8_t *)image_data.ptrw();
memcpy(dst, p_buffer.start, p_buffer.length);
if (image.is_valid()) {
image->set_data(width, height, false, rgba ? Image::FORMAT_RGBA8 : Image::FORMAT_LA8, image_data);
} else {
image.instantiate(width, height, false, rgba ? Image::FORMAT_RGBA8 : Image::FORMAT_LA8, image_data);
}
camera_feed->set_RGB_img(image);
}
JpegBufferDecoder::JpegBufferDecoder(CameraFeed *p_camera_feed) :
BufferDecoder(p_camera_feed) {
}
void JpegBufferDecoder::decode(StreamingBuffer p_buffer) {
image_data.resize(p_buffer.length);
uint8_t *dst = (uint8_t *)image_data.ptrw();
memcpy(dst, p_buffer.start, p_buffer.length);
if (image->load_jpg_from_buffer(image_data) == OK) {
camera_feed->set_RGB_img(image);
}
}

modules/camera/buffer_decoder.h

@@ -0,0 +1,116 @@
/**************************************************************************/
/* buffer_decoder.h */
/**************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
#ifndef BUFFER_DECODER_H
#define BUFFER_DECODER_H
#include "core/io/image.h"
#include "core/templates/vector.h"
class CameraFeed;
struct StreamingBuffer {
void *start = nullptr;
size_t length = 0;
};
class BufferDecoder {
protected:
CameraFeed *camera_feed = nullptr;
Ref<Image> image;
int width = 0;
int height = 0;
public:
virtual void decode(StreamingBuffer p_buffer) = 0;
BufferDecoder(CameraFeed *p_camera_feed);
virtual ~BufferDecoder() {}
};
class AbstractYuyvBufferDecoder : public BufferDecoder {
protected:
int *component_indexes = nullptr;
public:
AbstractYuyvBufferDecoder(CameraFeed *p_camera_feed);
~AbstractYuyvBufferDecoder();
};
class SeparateYuyvBufferDecoder : public AbstractYuyvBufferDecoder {
private:
Vector<uint8_t> y_image_data;
Vector<uint8_t> cbcr_image_data;
Ref<Image> y_image;
Ref<Image> cbcr_image;
public:
SeparateYuyvBufferDecoder(CameraFeed *p_camera_feed);
virtual void decode(StreamingBuffer p_buffer) override;
};
class YuyvToGrayscaleBufferDecoder : public AbstractYuyvBufferDecoder {
private:
Vector<uint8_t> image_data;
public:
YuyvToGrayscaleBufferDecoder(CameraFeed *p_camera_feed);
virtual void decode(StreamingBuffer p_buffer) override;
};
class YuyvToRgbBufferDecoder : public AbstractYuyvBufferDecoder {
private:
Vector<uint8_t> image_data;
public:
YuyvToRgbBufferDecoder(CameraFeed *p_camera_feed);
virtual void decode(StreamingBuffer p_buffer) override;
};
class CopyBufferDecoder : public BufferDecoder {
private:
Vector<uint8_t> image_data;
bool rgba = false;
public:
CopyBufferDecoder(CameraFeed *p_camera_feed, bool p_rgba);
virtual void decode(StreamingBuffer p_buffer) override;
};
class JpegBufferDecoder : public BufferDecoder {
private:
Vector<uint8_t> image_data;
public:
JpegBufferDecoder(CameraFeed *p_camera_feed);
virtual void decode(StreamingBuffer p_buffer) override;
};
#endif // BUFFER_DECODER_H

modules/camera/camera_feed_linux.cpp

@@ -0,0 +1,363 @@
/**************************************************************************/
/* camera_feed_linux.cpp */
/**************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
#include "camera_feed_linux.h"
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
void CameraFeedLinux::update_buffer_thread_func(void *p_func) {
if (p_func) {
CameraFeedLinux *camera_feed_linux = (CameraFeedLinux *)p_func;
camera_feed_linux->_update_buffer();
}
}
void CameraFeedLinux::_update_buffer() {
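// Capture-thread loop: poll the device roughly every 10 ms until
// deactivate_feed() sets the exit flag.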
while (!exit_flag.is_set()) {
_read_frame();
usleep(10000);
}
}
void CameraFeedLinux::_query_device(const String &p_device_name) {
file_descriptor = open(p_device_name.ascii(), O_RDWR | O_NONBLOCK, 0);
ERR_FAIL_COND_MSG(file_descriptor == -1, vformat("Cannot open file descriptor for %s. Error: %d.", p_device_name, errno));
struct v4l2_capability capability;
if (ioctl(file_descriptor, VIDIOC_QUERYCAP, &capability) == -1) {
ERR_FAIL_MSG(vformat("Cannot query device. Error: %d.", errno));
}
name = String((char *)capability.card);
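// Enumerate every supported pixel format, then each discrete frame size for
// that format, then each frame interval for that size; every combination is
// recorded as one FeedFormat entry (interval -1/1 when none are reported).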
for (int index = 0;; index++) {
struct v4l2_fmtdesc fmtdesc;
memset(&fmtdesc, 0, sizeof(fmtdesc));
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmtdesc.index = index;
if (ioctl(file_descriptor, VIDIOC_ENUM_FMT, &fmtdesc) == -1) {
break;
}
for (int res_index = 0;; res_index++) {
struct v4l2_frmsizeenum frmsizeenum;
memset(&frmsizeenum, 0, sizeof(frmsizeenum));
frmsizeenum.pixel_format = fmtdesc.pixelformat;
frmsizeenum.index = res_index;
if (ioctl(file_descriptor, VIDIOC_ENUM_FRAMESIZES, &frmsizeenum) == -1) {
break;
}
for (int framerate_index = 0;; framerate_index++) {
struct v4l2_frmivalenum frmivalenum;
memset(&frmivalenum, 0, sizeof(frmivalenum));
frmivalenum.pixel_format = fmtdesc.pixelformat;
frmivalenum.width = frmsizeenum.discrete.width;
frmivalenum.height = frmsizeenum.discrete.height;
frmivalenum.index = framerate_index;
if (ioctl(file_descriptor, VIDIOC_ENUM_FRAMEINTERVALS, &frmivalenum) == -1) {
if (framerate_index == 0) {
_add_format(fmtdesc, frmsizeenum.discrete, -1, 1);
}
break;
}
_add_format(fmtdesc, frmsizeenum.discrete, frmivalenum.discrete.numerator, frmivalenum.discrete.denominator);
}
}
}
close(file_descriptor);
}
void CameraFeedLinux::_add_format(v4l2_fmtdesc p_description, v4l2_frmsize_discrete p_size, int p_frame_numerator, int p_frame_denominator) {
FeedFormat feed_format;
feed_format.width = p_size.width;
feed_format.height = p_size.height;
feed_format.format = String((char *)p_description.description);
feed_format.frame_numerator = p_frame_numerator;
feed_format.frame_denominator = p_frame_denominator;
feed_format.pixel_format = p_description.pixelformat;
print_verbose(vformat("%s %dx%d@%d/%dfps", (char *)p_description.description, p_size.width, p_size.height, p_frame_denominator, p_frame_numerator));
formats.push_back(feed_format);
}
bool CameraFeedLinux::_request_buffers() {
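// Ask the driver for four mmap-backed capture buffers and map each one into
// this process; the driver may grant fewer, but at least two are required.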
struct v4l2_requestbuffers requestbuffers;
memset(&requestbuffers, 0, sizeof(requestbuffers));
requestbuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
requestbuffers.memory = V4L2_MEMORY_MMAP;
requestbuffers.count = 4;
if (ioctl(file_descriptor, VIDIOC_REQBUFS, &requestbuffers) == -1) {
ERR_FAIL_V_MSG(false, vformat("ioctl(VIDIOC_REQBUFS) error: %d.", errno));
}
ERR_FAIL_COND_V_MSG(requestbuffers.count < 2, false, "Not enough buffers granted.");
buffer_count = requestbuffers.count;
buffers = new StreamingBuffer[buffer_count];
for (unsigned int i = 0; i < buffer_count; i++) {
struct v4l2_buffer buffer;
memset(&buffer, 0, sizeof(buffer));
buffer.type = requestbuffers.type;
buffer.memory = V4L2_MEMORY_MMAP;
buffer.index = i;
if (ioctl(file_descriptor, VIDIOC_QUERYBUF, &buffer) == -1) {
delete[] buffers;
ERR_FAIL_V_MSG(false, vformat("ioctl(VIDIOC_QUERYBUF) error: %d.", errno));
}
buffers[i].length = buffer.length;
buffers[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED, file_descriptor, buffer.m.offset);
if (buffers[i].start == MAP_FAILED) {
for (unsigned int b = 0; b < i; b++) {
_unmap_buffers(i);
}
delete[] buffers;
ERR_FAIL_V_MSG(false, "Mapping buffers failed.");
}
}
return true;
}
bool CameraFeedLinux::_start_capturing() {
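// Queue all mapped buffers with the driver, then switch the stream on;
// frames are dequeued later by _read_frame() on the capture thread.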
for (unsigned int i = 0; i < buffer_count; i++) {
struct v4l2_buffer buffer;
memset(&buffer, 0, sizeof(buffer));
buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buffer.memory = V4L2_MEMORY_MMAP;
buffer.index = i;
if (ioctl(file_descriptor, VIDIOC_QBUF, &buffer) == -1) {
ERR_FAIL_V_MSG(false, vformat("ioctl(VIDIOC_QBUF) error: %d.", errno));
}
}
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(file_descriptor, VIDIOC_STREAMON, &type) == -1) {
ERR_FAIL_V_MSG(false, vformat("ioctl(VIDIOC_STREAMON) error: %d.", errno));
}
return true;
}
void CameraFeedLinux::_read_frame() {
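// Non-blocking dequeue: EAGAIN just means no new frame is ready yet, while
// any other error stops the capture thread by setting the exit flag.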
struct v4l2_buffer buffer;
memset(&buffer, 0, sizeof(buffer));
buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buffer.memory = V4L2_MEMORY_MMAP;
if (ioctl(file_descriptor, VIDIOC_DQBUF, &buffer) == -1) {
if (errno != EAGAIN) {
print_error(vformat("ioctl(VIDIOC_DQBUF) error: %d.", errno));
exit_flag.set();
}
return;
}
buffer_decoder->decode(buffers[buffer.index]);
if (ioctl(file_descriptor, VIDIOC_QBUF, &buffer) == -1) {
print_error(vformat("ioctl(VIDIOC_QBUF) error: %d.", errno));
}
emit_signal(SNAME("frame_changed"));
}
void CameraFeedLinux::_stop_capturing() {
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(file_descriptor, VIDIOC_STREAMOFF, &type) == -1) {
print_error(vformat("ioctl(VIDIOC_STREAMOFF) error: %d.", errno));
}
}
void CameraFeedLinux::_unmap_buffers(unsigned int p_count) {
for (unsigned int i = 0; i < p_count; i++) {
munmap(buffers[i].start, buffers[i].length);
}
}
void CameraFeedLinux::_start_thread() {
exit_flag.clear();
thread = memnew(Thread);
thread->start(CameraFeedLinux::update_buffer_thread_func, this);
}
String CameraFeedLinux::get_device_name() const {
return device_name;
}
bool CameraFeedLinux::activate_feed() {
file_descriptor = open(device_name.ascii(), O_RDWR | O_NONBLOCK, 0);
if (_request_buffers() && _start_capturing()) {
buffer_decoder = _create_buffer_decoder();
_start_thread();
return true;
}
ERR_FAIL_V_MSG(false, "Could not activate feed.");
}
BufferDecoder *CameraFeedLinux::_create_buffer_decoder() {
switch (formats[selected_format].pixel_format) {
case V4L2_PIX_FMT_MJPEG:
case V4L2_PIX_FMT_JPEG:
return memnew(JpegBufferDecoder(this));
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YYUV:
case V4L2_PIX_FMT_YVYU:
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_VYUY: {
String output = parameters["output"];
if (output == "separate") {
return memnew(SeparateYuyvBufferDecoder(this));
}
if (output == "grayscale") {
return memnew(YuyvToGrayscaleBufferDecoder(this));
}
if (output == "copy") {
return memnew(CopyBufferDecoder(this, false));
}
return memnew(YuyvToRgbBufferDecoder(this));
}
default:
return memnew(CopyBufferDecoder(this, true));
}
}
void CameraFeedLinux::deactivate_feed() {
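// Tear down in reverse order of activation: stop the capture thread, stop
// streaming, unmap and free the buffers, and replace the feed textures with
// placeholders so stale frames are not kept alive.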
exit_flag.set();
thread->wait_to_finish();
memdelete(thread);
_stop_capturing();
_unmap_buffers(buffer_count);
delete[] buffers;
memdelete(buffer_decoder);
for (int i = 0; i < CameraServer::FEED_IMAGES; i++) {
RID placeholder = RenderingServer::get_singleton()->texture_2d_placeholder_create();
RenderingServer::get_singleton()->texture_replace(texture[i], placeholder);
}
base_width = 0;
base_height = 0;
close(file_descriptor);
emit_signal(SNAME("format_changed"));
}
Array CameraFeedLinux::get_formats() const {
Array result;
for (const FeedFormat &format : formats) {
Dictionary dictionary;
dictionary["width"] = format.width;
dictionary["height"] = format.height;
dictionary["format"] = format.format;
dictionary["frame_numerator"] = format.frame_numerator;
dictionary["frame_denominator"] = format.frame_denominator;
result.push_back(dictionary);
}
return result;
}
CameraFeed::FeedFormat CameraFeedLinux::get_format() const {
return formats[selected_format];
}
bool CameraFeedLinux::set_format(int p_index, const Dictionary &p_parameters) {
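// Formats can only be changed while the feed is inactive: the device is opened
// just long enough to apply VIDIOC_S_FMT (and VIDIOC_S_PARM for the frame rate),
// then closed again; the capture descriptor is reopened in activate_feed().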
ERR_FAIL_COND_V_MSG(active, false, "Feed is active.");
ERR_FAIL_INDEX_V_MSG(p_index, formats.size(), false, "Invalid format index.");
parameters = p_parameters.duplicate();
selected_format = p_index;
FeedFormat feed_format = formats[p_index];
file_descriptor = open(device_name.ascii(), O_RDWR | O_NONBLOCK, 0);
struct v4l2_format format;
memset(&format, 0, sizeof(format));
format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
format.fmt.pix.width = feed_format.width;
format.fmt.pix.height = feed_format.height;
format.fmt.pix.pixelformat = feed_format.pixel_format;
if (ioctl(file_descriptor, VIDIOC_S_FMT, &format) == -1) {
close(file_descriptor);
ERR_FAIL_V_MSG(false, vformat("Cannot set format, error: %d.", errno));
}
if (feed_format.frame_numerator > 0) {
struct v4l2_streamparm param;
memset(&param, 0, sizeof(param));
param.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
param.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
param.parm.capture.timeperframe.numerator = feed_format.frame_numerator;
param.parm.capture.timeperframe.denominator = feed_format.frame_denominator;
if (ioctl(file_descriptor, VIDIOC_S_PARM, &param) == -1) {
close(file_descriptor);
ERR_FAIL_V_MSG(false, vformat("Cannot set framerate, error: %d.", errno));
}
}
close(file_descriptor);
emit_signal(SNAME("format_changed"));
return true;
}
CameraFeedLinux::CameraFeedLinux(const String &p_device_name) :
CameraFeed() {
device_name = p_device_name;
_query_device(device_name);
set_format(0, Dictionary());
}
CameraFeedLinux::~CameraFeedLinux() {
if (is_active()) {
deactivate_feed();
}
}

modules/camera/camera_feed_linux.h

@@ -0,0 +1,78 @@
/**************************************************************************/
/* camera_feed_linux.h */
/**************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
#ifndef CAMERA_FEED_LINUX_H
#define CAMERA_FEED_LINUX_H
#include "buffer_decoder.h"
#include "core/os/thread.h"
#include "servers/camera/camera_feed.h"
#include <linux/videodev2.h>
struct StreamingBuffer;
class CameraFeedLinux : public CameraFeed {
private:
SafeFlag exit_flag;
Thread *thread = nullptr;
String device_name;
int file_descriptor = -1;
StreamingBuffer *buffers = nullptr;
unsigned int buffer_count = 0;
BufferDecoder *buffer_decoder = nullptr;
static void update_buffer_thread_func(void *p_func);
void _update_buffer();
void _query_device(const String &p_device_name);
void _add_format(v4l2_fmtdesc description, v4l2_frmsize_discrete size, int frame_numerator, int frame_denominator);
bool _request_buffers();
bool _start_capturing();
void _read_frame();
void _stop_capturing();
void _unmap_buffers(unsigned int p_count);
BufferDecoder *_create_buffer_decoder();
void _start_thread();
public:
String get_device_name() const;
bool activate_feed();
void deactivate_feed();
bool set_format(int p_index, const Dictionary &p_parameters);
Array get_formats() const;
FeedFormat get_format() const;
CameraFeedLinux(const String &p_device_name);
virtual ~CameraFeedLinux();
};
#endif // CAMERA_FEED_LINUX_H

modules/camera/camera_linux.cpp

@@ -0,0 +1,169 @@
/**************************************************************************/
/* camera_linux.cpp */
/**************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
#include "camera_linux.h"
#include "camera_feed_linux.h"
#include <dirent.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/stat.h>
#include <unistd.h>
void CameraLinux::camera_thread_func(void *p_camera_linux) {
if (p_camera_linux) {
CameraLinux *camera_linux = (CameraLinux *)p_camera_linux;
camera_linux->_update_devices();
}
}
void CameraLinux::_update_devices() {
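// Device-monitor loop (runs about once per second): drop feeds whose
// /dev/video* node is gone, then scan /dev for newly plugged cameras.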
while (!exit_flag.is_set()) {
{
MutexLock lock(camera_mutex);
for (int i = feeds.size() - 1; i >= 0; i--) {
Ref<CameraFeedLinux> feed = (Ref<CameraFeedLinux>)feeds[i];
String device_name = feed->get_device_name();
if (!_is_active(device_name)) {
remove_feed(feed);
}
}
DIR *devices = opendir("/dev");
if (devices) {
struct dirent *device;
while ((device = readdir(devices)) != nullptr) {
if (strncmp(device->d_name, "video", 5) != 0) {
continue;
}
String device_name = String("/dev/") + String(device->d_name);
if (!_has_device(device_name)) {
_add_device(device_name);
}
}
}
closedir(devices);
}
usleep(1000000);
}
}
bool CameraLinux::_has_device(const String &p_device_name) {
for (int i = 0; i < feeds.size(); i++) {
Ref<CameraFeedLinux> feed = (Ref<CameraFeedLinux>)feeds[i];
if (feed->get_device_name() == p_device_name) {
return true;
}
}
return false;
}
void CameraLinux::_add_device(const String &p_device_name) {
int file_descriptor = _open_device(p_device_name);
if (file_descriptor != -1) {
if (_is_video_capture_device(file_descriptor)) {
Ref<CameraFeedLinux> feed = memnew(CameraFeedLinux(p_device_name));
add_feed(feed);
}
}
close(file_descriptor);
}
int CameraLinux::_open_device(const String &p_device_name) {
struct stat s;
if (stat(p_device_name.ascii(), &s) == -1) {
return -1;
}
if (!S_ISCHR(s.st_mode)) {
return -1;
}
return open(p_device_name.ascii(), O_RDWR | O_NONBLOCK, 0);
}
// TODO any cheaper/cleaner way to check if file descriptor is invalid?
bool CameraLinux::_is_active(const String &p_device_name) {
struct v4l2_capability capability;
bool result = false;
int file_descriptor = _open_device(p_device_name);
if (file_descriptor != -1 && ioctl(file_descriptor, VIDIOC_QUERYCAP, &capability) != -1) {
result = true;
}
close(file_descriptor);
return result;
}
bool CameraLinux::_is_video_capture_device(int p_file_descriptor) {
struct v4l2_capability capability;
if (ioctl(p_file_descriptor, VIDIOC_QUERYCAP, &capability) == -1) {
print_verbose("Cannot query device");
return false;
}
if (!(capability.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
print_verbose(vformat("%s is no video capture device\n", String((char *)capability.card)));
return false;
}
if (!(capability.capabilities & V4L2_CAP_STREAMING)) {
print_verbose(vformat("%s does not support streaming", String((char *)capability.card)));
return false;
}
return _can_query_format(p_file_descriptor, V4L2_BUF_TYPE_VIDEO_CAPTURE);
}
bool CameraLinux::_can_query_format(int p_file_descriptor, int p_type) {
struct v4l2_format format;
memset(&format, 0, sizeof(format));
format.type = p_type;
return ioctl(p_file_descriptor, VIDIOC_G_FMT, &format) != -1;
}
CameraLinux::CameraLinux() {
camera_thread.start(CameraLinux::camera_thread_func, this);
};
CameraLinux::~CameraLinux() {
exit_flag.set();
camera_thread.wait_to_finish();
}

modules/camera/camera_linux.h

@@ -0,0 +1,60 @@
/**************************************************************************/
/* camera_linux.h */
/**************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/**************************************************************************/
/* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
/* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/**************************************************************************/
#ifndef CAMERA_LINUX_H
#define CAMERA_LINUX_H
#include "core/os/mutex.h"
#include "core/os/thread.h"
#include "servers/camera_server.h"
class CameraLinux : public CameraServer {
private:
SafeFlag exit_flag;
Thread camera_thread;
Mutex camera_mutex;
static void camera_thread_func(void *p_camera_linux);
void _update_devices();
bool _has_device(const String &p_device_name);
void _add_device(const String &p_device_name);
void _remove_device(const String &p_device_name);
int _open_device(const String &p_device_name);
bool _is_active(const String &p_device_name);
bool _is_video_capture_device(int p_file_descriptor);
bool _can_query_format(int p_file_descriptor, int p_type);
public:
CameraLinux();
~CameraLinux();
};
#endif // CAMERA_LINUX_H

modules/camera/config.py

@@ -1,5 +1,5 @@
def can_build(env, platform):
-return platform == "macos" or platform == "windows"
+return platform == "macos" or platform == "windows" or platform == "linuxbsd"
def configure(env):

modules/camera/register_types.cpp

@@ -30,6 +30,9 @@
#include "register_types.h"
#if defined(LINUXBSD_ENABLED)
#include "camera_linux.h"
#endif
#if defined(WINDOWS_ENABLED)
#include "camera_win.h"
#endif
@@ -42,6 +45,9 @@ void initialize_camera_module(ModuleInitializationLevel p_level) {
return;
}
#if defined(LINUXBSD_ENABLED)
CameraServer::make_default<CameraLinux>();
#endif
#if defined(WINDOWS_ENABLED)
CameraServer::make_default<CameraWindows>();
#endif

pyproject.toml

@@ -70,6 +70,7 @@ ignore-words-list = """\
numer,
ot,
outin,
parm,
requestor,
te,
textin,

scene/resources/camera_texture.cpp

@@ -47,6 +47,11 @@ void CameraTexture::_bind_methods() {
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "camera_is_active"), "set_camera_active", "get_camera_active");
}
void CameraTexture::_on_format_changed() {
// FIXME: `emit_changed` is more appropriate, but causes errors for some reason.
callable_mp((Resource *)this, &Resource::emit_changed).call_deferred();
}
int CameraTexture::get_width() const {
Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
if (feed.is_valid()) {
@@ -82,13 +87,26 @@ RID CameraTexture::get_rid() const {
}
Ref<Image> CameraTexture::get_image() const {
-// not (yet) supported
-return Ref<Image>();
+return RenderingServer::get_singleton()->texture_2d_get(get_rid());
}
void CameraTexture::set_camera_feed_id(int p_new_id) {
Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
if (feed.is_valid()) {
if (feed->is_connected("format_changed", callable_mp(this, &CameraTexture::_on_format_changed))) {
feed->disconnect("format_changed", callable_mp(this, &CameraTexture::_on_format_changed));
}
}
camera_feed_id = p_new_id;
feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
if (feed.is_valid()) {
feed->connect("format_changed", callable_mp(this, &CameraTexture::_on_format_changed));
}
notify_property_list_changed();
callable_mp((Resource *)this, &Resource::emit_changed).call_deferred();
}
int CameraTexture::get_camera_feed_id() const {
@@ -98,6 +116,7 @@ int CameraTexture::get_camera_feed_id() const {
void CameraTexture::set_which_feed(CameraServer::FeedImage p_which) {
which_feed = p_which;
notify_property_list_changed();
callable_mp((Resource *)this, &Resource::emit_changed).call_deferred();
}
CameraServer::FeedImage CameraTexture::get_which_feed() const {
@@ -109,6 +128,7 @@ void CameraTexture::set_camera_active(bool p_active) {
if (feed.is_valid()) {
feed->set_active(p_active);
notify_property_list_changed();
callable_mp((Resource *)this, &Resource::emit_changed).call_deferred();
}
}

scene/resources/camera_texture.h

@@ -43,6 +43,7 @@ private:
protected:
static void _bind_methods();
void _on_format_changed();
public:
virtual int get_width() const override;

servers/camera/camera_feed.cpp

@@ -56,9 +56,16 @@ void CameraFeed::_bind_methods() {
ClassDB::bind_method(D_METHOD("get_datatype"), &CameraFeed::get_datatype);
ClassDB::bind_method(D_METHOD("get_formats"), &CameraFeed::get_formats);
ClassDB::bind_method(D_METHOD("set_format", "index", "parameters"), &CameraFeed::set_format);
ADD_SIGNAL(MethodInfo("frame_changed"));
ADD_SIGNAL(MethodInfo("format_changed"));
ADD_GROUP("Feed", "feed_");
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "feed_is_active"), "set_active", "is_active");
ADD_PROPERTY(PropertyInfo(Variant::TRANSFORM2D, "feed_transform"), "set_transform", "get_transform");
ADD_PROPERTY(PropertyInfo(Variant::ARRAY, "formats"), "", "get_formats");
BIND_ENUM_CONSTANT(FEED_NOIMAGE);
BIND_ENUM_CONSTANT(FEED_RGB);
@@ -84,13 +91,11 @@ void CameraFeed::set_active(bool p_is_active) {
} else if (p_is_active) {
// attempt to activate this feed
if (activate_feed()) {
-print_line("Activate " + name);
active = true;
}
} else {
// just deactivate it
deactivate_feed();
-print_line("Deactivate " + name);
active = false;
}
}
@@ -183,6 +188,8 @@ void CameraFeed::set_RGB_img(const Ref<Image> &p_rgb_img) {
RID new_texture = RenderingServer::get_singleton()->texture_2d_create(p_rgb_img);
RenderingServer::get_singleton()->texture_replace(texture[CameraServer::FEED_RGBA_IMAGE], new_texture);
emit_signal(SNAME("format_changed"));
} else {
RenderingServer::get_singleton()->texture_2d_update(texture[CameraServer::FEED_RGBA_IMAGE], p_rgb_img);
}
@@ -204,6 +211,8 @@ void CameraFeed::set_YCbCr_img(const Ref<Image> &p_ycbcr_img) {
RID new_texture = RenderingServer::get_singleton()->texture_2d_create(p_ycbcr_img);
RenderingServer::get_singleton()->texture_replace(texture[CameraServer::FEED_RGBA_IMAGE], new_texture);
emit_signal(SNAME("format_changed"));
} else {
RenderingServer::get_singleton()->texture_2d_update(texture[CameraServer::FEED_RGBA_IMAGE], p_ycbcr_img);
}
@@ -235,6 +244,8 @@ void CameraFeed::set_YCbCr_imgs(const Ref<Image> &p_y_img, const Ref<Image> &p_c
RID new_texture = RenderingServer::get_singleton()->texture_2d_create(p_cbcr_img);
RenderingServer::get_singleton()->texture_replace(texture[CameraServer::FEED_CBCR_IMAGE], new_texture);
}
emit_signal(SNAME("format_changed"));
} else {
RenderingServer::get_singleton()->texture_2d_update(texture[CameraServer::FEED_Y_IMAGE], p_y_img);
RenderingServer::get_singleton()->texture_2d_update(texture[CameraServer::FEED_CBCR_IMAGE], p_cbcr_img);
@@ -252,3 +263,16 @@ bool CameraFeed::activate_feed() {
void CameraFeed::deactivate_feed() {
// nothing to do here
}
bool CameraFeed::set_format(int p_index, const Dictionary &p_parameters) {
return false;
}
Array CameraFeed::get_formats() const {
return Array();
}
CameraFeed::FeedFormat CameraFeed::get_format() const {
FeedFormat feed_format = {};
return feed_format;
}

servers/camera/camera_feed.h

@@ -60,14 +60,26 @@ public:
private:
int id; // unique id for this, for internal use in case feeds are removed
-int base_width;
-int base_height;
protected:
struct FeedFormat {
int width = 0;
int height = 0;
String format;
int frame_numerator = 0;
int frame_denominator = 0;
uint32_t pixel_format = 0;
};
String name; // name of our camera feed
FeedDataType datatype; // type of texture data stored
FeedPosition position; // position of camera on the device
Transform2D transform; // display transform
int base_width = 0;
int base_height = 0;
Vector<FeedFormat> formats;
Dictionary parameters;
int selected_format = -1;
bool active; // only when active do we actually update the camera texture each frame
RID texture[CameraServer::FEED_IMAGES]; // texture images needed for this
@@ -102,6 +114,10 @@ public:
void set_YCbCr_img(const Ref<Image> &p_ycbcr_img);
void set_YCbCr_imgs(const Ref<Image> &p_y_img, const Ref<Image> &p_cbcr_img);
virtual bool set_format(int p_index, const Dictionary &p_parameters);
virtual Array get_formats() const;
virtual FeedFormat get_format() const;
virtual bool activate_feed();
virtual void deactivate_feed();
};