/*************************************************************************/
/*  binder_common.h                                                      */
/*************************************************************************/
/*                       This file is part of:                           */
/*                           GODOT ENGINE                                */
/*                      https://godotengine.org                          */
/*************************************************************************/
/* Copyright (c) 2007-2022 Juan Linietsky, Ariel Manzur.                 */
/* Copyright (c) 2014-2022 Godot Engine contributors (cf. AUTHORS.md).   */
/*                                                                       */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the       */
/* "Software"), to deal in the Software without restriction, including   */
/* without limitation the rights to use, copy, modify, merge, publish,   */
/* distribute, sublicense, and/or sell copies of the Software, and to    */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions:                                             */
/*                                                                       */
/* The above copyright notice and this permission notice shall be        */
/* included in all copies or substantial portions of the Software.       */
/*                                                                       */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,       */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF    */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY  */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,  */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE     */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                */
/*************************************************************************/

#ifndef BINDER_COMMON_H
#define BINDER_COMMON_H

#include "core/input/input_enums.h"
#include "core/object/object.h"
#include "core/os/keyboard.h"
#include "core/templates/list.h"
#include "core/templates/simple_type.h"
#include "core/typedefs.h"
#include "core/variant/method_ptrcall.h"
#include "core/variant/type_info.h"
#include "core/variant/variant.h"
#include "core/variant/variant_internal.h"

#include <stdio.h>

// Variant cannot define an implicit cast operator for every Object subclass, so the
// casting is done here, to allow binding methods with parameters more specific than Object *

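// Illustrative note (names below are hypothetical, not part of this header): a bound method such
// as `void set_target(Node *p_node)` can be called with a Variant holding an Object pointer;
// VariantCaster<Node *>::cast() narrows it through Object::cast_to<Node>(), while non-Object
// parameter types simply go through Variant's own conversion operators.
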
template <class T>
struct VariantCaster {
	static _FORCE_INLINE_ T cast(const Variant &p_variant) {
		using TStripped = std::remove_pointer_t<T>;
		if constexpr (std::is_base_of<Object, TStripped>::value) {
			return Object::cast_to<TStripped>(p_variant);
		} else {
			return p_variant;
		}
	}
};

template <class T>
struct VariantCaster<T &> {
	static _FORCE_INLINE_ T cast(const Variant &p_variant) {
		using TStripped = std::remove_pointer_t<T>;
		if constexpr (std::is_base_of<Object, TStripped>::value) {
			return Object::cast_to<TStripped>(p_variant);
		} else {
			return p_variant;
		}
	}
};

template <class T>
struct VariantCaster<const T &> {
	static _FORCE_INLINE_ T cast(const Variant &p_variant) {
		using TStripped = std::remove_pointer_t<T>;
		if constexpr (std::is_base_of<Object, TStripped>::value) {
			return Object::cast_to<TStripped>(p_variant);
		} else {
			return p_variant;
		}
	}
};

#define VARIANT_ENUM_CAST(m_enum) \
	MAKE_ENUM_TYPE_INFO(m_enum) \
	template <> \
	struct VariantCaster<m_enum> { \
		static _FORCE_INLINE_ m_enum cast(const Variant &p_variant) { \
			return (m_enum)p_variant.operator int64_t(); \
		} \
	}; \
	template <> \
	struct PtrToArg<m_enum> { \
		_FORCE_INLINE_ static m_enum convert(const void *p_ptr) { \
			return m_enum(*reinterpret_cast<const int64_t *>(p_ptr)); \
		} \
		typedef int64_t EncodeT; \
		_FORCE_INLINE_ static void encode(m_enum p_val, const void *p_ptr) { \
			*(int64_t *)p_ptr = (int64_t)p_val; \
		} \
	}; \
	template <> \
	struct ZeroInitializer<m_enum> { \
		static void initialize(m_enum &value) { value = (m_enum)0; } \
	};

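// Usage note: invoking VARIANT_ENUM_CAST(SomeClass::SomeEnum) (hypothetical enum) generates the
// type info, Variant cast and pointer encode/decode glue needed to bind methods that take or
// return that enum; the concrete casts for core types (Error, Side, etc.) are declared further below.
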
#define VARIANT_BITFIELD_CAST(m_enum) \
	MAKE_BITFIELD_TYPE_INFO(m_enum) \
	template <> \
	struct VariantCaster<BitField<m_enum>> { \
		static _FORCE_INLINE_ BitField<m_enum> cast(const Variant &p_variant) { \
			return BitField<m_enum>(p_variant.operator int64_t()); \
		} \
	}; \
	template <> \
	struct PtrToArg<BitField<m_enum>> { \
		_FORCE_INLINE_ static BitField<m_enum> convert(const void *p_ptr) { \
			return BitField<m_enum>(*reinterpret_cast<const int64_t *>(p_ptr)); \
		} \
		typedef int64_t EncodeT; \
		_FORCE_INLINE_ static void encode(BitField<m_enum> p_val, const void *p_ptr) { \
			*(int64_t *)p_ptr = p_val; \
		} \
	}; \
	template <> \
	struct ZeroInitializer<BitField<m_enum>> { \
		static void initialize(BitField<m_enum> &value) { value = 0; } \
	};

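// The BitField<m_enum> wrapper follows the same scheme as the enum cast above, but keeps the
// combined-flags semantics: values travel through Variant and ptrcall as plain int64_t bit masks.
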
// Object enum casts must go here
VARIANT_ENUM_CAST(Object::ConnectFlags);

VARIANT_ENUM_CAST(Vector2::Axis);
VARIANT_ENUM_CAST(Vector2i::Axis);
VARIANT_ENUM_CAST(Vector3::Axis);
VARIANT_ENUM_CAST(Vector3i::Axis);
VARIANT_ENUM_CAST(Vector4::Axis);
VARIANT_ENUM_CAST(Vector4i::Axis);
VARIANT_ENUM_CAST(Basis::EulerOrder);
VARIANT_ENUM_CAST(Projection::Planes);

VARIANT_ENUM_CAST(Error);
VARIANT_ENUM_CAST(Side);
VARIANT_ENUM_CAST(ClockDirection);
VARIANT_ENUM_CAST(Corner);
VARIANT_ENUM_CAST(HatDir);
VARIANT_ENUM_CAST(HatMask);
VARIANT_ENUM_CAST(JoyAxis);
VARIANT_ENUM_CAST(JoyButton);
VARIANT_ENUM_CAST(Key);
VARIANT_ENUM_CAST(KeyModifierMask);
VARIANT_ENUM_CAST(MIDIMessage);
VARIANT_ENUM_CAST(MouseButton);
VARIANT_ENUM_CAST(Orientation);
VARIANT_ENUM_CAST(HorizontalAlignment);
VARIANT_ENUM_CAST(VerticalAlignment);
VARIANT_ENUM_CAST(InlineAlignment);
VARIANT_ENUM_CAST(PropertyHint);
VARIANT_ENUM_CAST(PropertyUsageFlags);
VARIANT_ENUM_CAST(Variant::Type);
VARIANT_ENUM_CAST(Variant::Operator);

template <>
struct VariantCaster<char32_t> {
	static _FORCE_INLINE_ char32_t cast(const Variant &p_variant) {
		return (char32_t)p_variant.operator int();
	}
};

template <>
struct PtrToArg<char32_t> {
	_FORCE_INLINE_ static char32_t convert(const void *p_ptr) {
		return char32_t(*reinterpret_cast<const int *>(p_ptr));
	}
	typedef int64_t EncodeT;
	_FORCE_INLINE_ static void encode(char32_t p_val, const void *p_ptr) {
		*(int *)p_ptr = p_val;
	}
};

template <typename T>
struct VariantObjectClassChecker {
	static _FORCE_INLINE_ bool check(const Variant &p_variant) {
		using TStripped = std::remove_pointer_t<T>;
		if constexpr (std::is_base_of<Object, TStripped>::value) {
			Object *obj = p_variant;
			return Object::cast_to<TStripped>(p_variant) || !obj;
		} else {
			return true;
		}
	}
};

template <typename T>
class Ref;

template <typename T>
struct VariantObjectClassChecker<const Ref<T> &> {
	static _FORCE_INLINE_ bool check(const Variant &p_variant) {
		Object *obj = p_variant;
		const Ref<T> node = p_variant;
		return node.ptr() || !obj;
	}
};

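// Note: both checkers deliberately accept a null Object (or an empty Variant) as valid, so null
// can be passed for any Object-derived or Ref<T> parameter; only a non-null value of the wrong
// class is rejected.
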
#ifdef DEBUG_METHODS_ENABLED

template <class T>
struct VariantCasterAndValidate {
	static _FORCE_INLINE_ T cast(const Variant **p_args, uint32_t p_arg_idx, Callable::CallError &r_error) {
		Variant::Type argtype = GetTypeInfo<T>::VARIANT_TYPE;
		if (!Variant::can_convert_strict(p_args[p_arg_idx]->get_type(), argtype) ||
				!VariantObjectClassChecker<T>::check(*p_args[p_arg_idx])) {
			r_error.error = Callable::CallError::CALL_ERROR_INVALID_ARGUMENT;
			r_error.argument = p_arg_idx;
			r_error.expected = argtype;
		}

		return VariantCaster<T>::cast(*p_args[p_arg_idx]);
	}
};

template <class T>
struct VariantCasterAndValidate<T &> {
	static _FORCE_INLINE_ T cast(const Variant **p_args, uint32_t p_arg_idx, Callable::CallError &r_error) {
		Variant::Type argtype = GetTypeInfo<T>::VARIANT_TYPE;
		if (!Variant::can_convert_strict(p_args[p_arg_idx]->get_type(), argtype) ||
				!VariantObjectClassChecker<T>::check(*p_args[p_arg_idx])) {
			r_error.error = Callable::CallError::CALL_ERROR_INVALID_ARGUMENT;
			r_error.argument = p_arg_idx;
			r_error.expected = argtype;
		}

		return VariantCaster<T>::cast(*p_args[p_arg_idx]);
	}
};

template <class T>
struct VariantCasterAndValidate<const T &> {
	static _FORCE_INLINE_ T cast(const Variant **p_args, uint32_t p_arg_idx, Callable::CallError &r_error) {
		Variant::Type argtype = GetTypeInfo<T>::VARIANT_TYPE;
		if (!Variant::can_convert_strict(p_args[p_arg_idx]->get_type(), argtype) ||
				!VariantObjectClassChecker<T>::check(*p_args[p_arg_idx])) {
			r_error.error = Callable::CallError::CALL_ERROR_INVALID_ARGUMENT;
			r_error.argument = p_arg_idx;
			r_error.expected = argtype;
		}

		return VariantCaster<T>::cast(*p_args[p_arg_idx]);
	}
};

#endif // DEBUG_METHODS_ENABLED

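// In debug builds the casters above record CALL_ERROR_INVALID_ARGUMENT in r_error when the Variant
// cannot be converted strictly (or holds the wrong Object class), but they still return a
// best-effort cast, so the callers below can expand the argument pack unconditionally.
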
template <class T, class... P, size_t... Is>
void call_with_variant_args_helper(T *p_instance, void (T::*p_method)(P...), const Variant **p_args, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	(p_instance->*p_method)(VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	(p_instance->*p_method)(VariantCaster<P>::cast(*p_args[Is])...);
#endif
	(void)(p_args); //avoid warning
}

template <class T, class... P, size_t... Is>
void call_with_variant_argsc_helper(T *p_instance, void (T::*p_method)(P...) const, const Variant **p_args, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	(p_instance->*p_method)(VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	(p_instance->*p_method)(VariantCaster<P>::cast(*p_args[Is])...);
#endif
	(void)(p_args); //avoid warning
}

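// The IndexSequence<Is...> parameter exists purely so that `Is` can be expanded in lockstep with
// the parameter pack P: argument i is pulled from p_args[i] and cast to the matching C++ type
// before the member function is invoked.
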
template <class T, class... P, size_t... Is>
void call_with_ptr_args_helper(T *p_instance, void (T::*p_method)(P...), const void **p_args, IndexSequence<Is...>) {
	(p_instance->*p_method)(PtrToArg<P>::convert(p_args[Is])...);
}

template <class T, class... P, size_t... Is>
void call_with_ptr_argsc_helper(T *p_instance, void (T::*p_method)(P...) const, const void **p_args, IndexSequence<Is...>) {
	(p_instance->*p_method)(PtrToArg<P>::convert(p_args[Is])...);
}

template <class T, class R, class... P, size_t... Is>
void call_with_ptr_args_ret_helper(T *p_instance, R (T::*p_method)(P...), const void **p_args, void *r_ret, IndexSequence<Is...>) {
	PtrToArg<R>::encode((p_instance->*p_method)(PtrToArg<P>::convert(p_args[Is])...), r_ret);
}

template <class T, class R, class... P, size_t... Is>
void call_with_ptr_args_retc_helper(T *p_instance, R (T::*p_method)(P...) const, const void **p_args, void *r_ret, IndexSequence<Is...>) {
	PtrToArg<R>::encode((p_instance->*p_method)(PtrToArg<P>::convert(p_args[Is])...), r_ret);
}

template <class T, class... P, size_t... Is>
void call_with_ptr_args_static_helper(T *p_instance, void (*p_method)(T *, P...), const void **p_args, IndexSequence<Is...>) {
	p_method(p_instance, PtrToArg<P>::convert(p_args[Is])...);
}

template <class T, class R, class... P, size_t... Is>
void call_with_ptr_args_static_retc_helper(T *p_instance, R (*p_method)(T *, P...), const void **p_args, void *r_ret, IndexSequence<Is...>) {
	PtrToArg<R>::encode(p_method(p_instance, PtrToArg<P>::convert(p_args[Is])...), r_ret);
}

template <class R, class... P, size_t... Is>
void call_with_ptr_args_static_method_ret_helper(R (*p_method)(P...), const void **p_args, void *r_ret, IndexSequence<Is...>) {
	PtrToArg<R>::encode(p_method(PtrToArg<P>::convert(p_args[Is])...), r_ret);
}

template <class... P, size_t... Is>
void call_with_ptr_args_static_method_helper(void (*p_method)(P...), const void **p_args, IndexSequence<Is...>) {
	p_method(PtrToArg<P>::convert(p_args[Is])...);
}

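// The "ptr" helpers above implement the ptrcall path: arguments arrive as raw typed pointers and
// are decoded with PtrToArg<P>::convert(), and a return value (if any) is written back into r_ret
// through PtrToArg<R>::encode(), with no Variant boxing involved.
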
template <class T, class... P, size_t... Is>
void call_with_validated_variant_args_helper(T *p_instance, void (T::*p_method)(P...), const Variant **p_args, IndexSequence<Is...>) {
	(p_instance->*p_method)((VariantInternalAccessor<typename GetSimpleTypeT<P>::type_t>::get(p_args[Is]))...);
}

template <class T, class... P, size_t... Is>
void call_with_validated_variant_argsc_helper(T *p_instance, void (T::*p_method)(P...) const, const Variant **p_args, IndexSequence<Is...>) {
	(p_instance->*p_method)((VariantInternalAccessor<typename GetSimpleTypeT<P>::type_t>::get(p_args[Is]))...);
}

template <class T, class R, class... P, size_t... Is>
void call_with_validated_variant_args_ret_helper(T *p_instance, R (T::*p_method)(P...), const Variant **p_args, Variant *r_ret, IndexSequence<Is...>) {
	VariantInternalAccessor<typename GetSimpleTypeT<R>::type_t>::set(r_ret, (p_instance->*p_method)((VariantInternalAccessor<typename GetSimpleTypeT<P>::type_t>::get(p_args[Is]))...));
}

template <class T, class R, class... P, size_t... Is>
void call_with_validated_variant_args_retc_helper(T *p_instance, R (T::*p_method)(P...) const, const Variant **p_args, Variant *r_ret, IndexSequence<Is...>) {
	VariantInternalAccessor<typename GetSimpleTypeT<R>::type_t>::set(r_ret, (p_instance->*p_method)((VariantInternalAccessor<typename GetSimpleTypeT<P>::type_t>::get(p_args[Is]))...));
}

template <class T, class R, class... P, size_t... Is>
void call_with_validated_variant_args_static_retc_helper(T *p_instance, R (*p_method)(T *, P...), const Variant **p_args, Variant *r_ret, IndexSequence<Is...>) {
	VariantInternalAccessor<typename GetSimpleTypeT<R>::type_t>::set(r_ret, p_method(p_instance, (VariantInternalAccessor<typename GetSimpleTypeT<P>::type_t>::get(p_args[Is]))...));
}

template <class T, class... P, size_t... Is>
void call_with_validated_variant_args_static_helper(T *p_instance, void (*p_method)(T *, P...), const Variant **p_args, IndexSequence<Is...>) {
	p_method(p_instance, (VariantInternalAccessor<typename GetSimpleTypeT<P>::type_t>::get(p_args[Is]))...);
}

template <class R, class... P, size_t... Is>
void call_with_validated_variant_args_static_method_ret_helper(R (*p_method)(P...), const Variant **p_args, Variant *r_ret, IndexSequence<Is...>) {
	VariantInternalAccessor<typename GetSimpleTypeT<R>::type_t>::set(r_ret, p_method((VariantInternalAccessor<typename GetSimpleTypeT<P>::type_t>::get(p_args[Is]))...));
}

template <class... P, size_t... Is>
void call_with_validated_variant_args_static_method_helper(void (*p_method)(P...), const Variant **p_args, IndexSequence<Is...>) {
	p_method((VariantInternalAccessor<typename GetSimpleTypeT<P>::type_t>::get(p_args[Is]))...);
}

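// The "validated" helpers assume the caller has already checked that every Variant holds exactly
// the expected type, so they skip casting entirely and read/write the values in place through
// VariantInternalAccessor.
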
template <class T, class... P>
void call_with_variant_args(T *p_instance, void (T::*p_method)(P...), const Variant **p_args, int p_argcount, Callable::CallError &r_error) {
#ifdef DEBUG_METHODS_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}

	if ((size_t)p_argcount < sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif
	call_with_variant_args_helper<T, P...>(p_instance, p_method, p_args, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class... P>
void call_with_variant_args_dv(T *p_instance, void (T::*p_method)(P...), const Variant **p_args, int p_argcount, Callable::CallError &r_error, const Vector<Variant> &default_values) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_helper(p_instance, p_method, args, r_error, BuildIndexSequence<sizeof...(P)>{});
}

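// Default-argument fill-in used by the *_dv helpers, illustrated with hypothetical numbers: for a
// method with three parameters (sizeof...(P) == 3) called with one argument and two registered
// defaults, missing == 2 and dvs == 2, so args[1] = &default_values[0] and args[2] = &default_values[1].
// When more defaults are registered than are missing, the (dvs - missing) offset skips the
// defaults whose parameters were already covered by the caller's arguments.
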
template <class T, class... P>
void call_with_variant_argsc(T *p_instance, void (T::*p_method)(P...) const, const Variant **p_args, int p_argcount, Callable::CallError &r_error) {
#ifdef DEBUG_METHODS_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}

	if ((size_t)p_argcount < sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif
	call_with_variant_argsc_helper<T, P...>(p_instance, p_method, p_args, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class... P>
void call_with_variant_argsc_dv(T *p_instance, void (T::*p_method)(P...) const, const Variant **p_args, int p_argcount, Callable::CallError &r_error, const Vector<Variant> &default_values) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_argsc_helper(p_instance, p_method, args, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class R, class... P>
void call_with_variant_args_ret_dv(T *p_instance, R (T::*p_method)(P...), const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error, const Vector<Variant> &default_values) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_ret_helper(p_instance, p_method, args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class R, class... P>
void call_with_variant_args_retc_dv(T *p_instance, R (T::*p_method)(P...) const, const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error, const Vector<Variant> &default_values) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_retc_helper(p_instance, p_method, args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class... P>
void call_with_ptr_args(T *p_instance, void (T::*p_method)(P...), const void **p_args) {
	call_with_ptr_args_helper<T, P...>(p_instance, p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class... P>
void call_with_ptr_argsc(T *p_instance, void (T::*p_method)(P...) const, const void **p_args) {
	call_with_ptr_argsc_helper<T, P...>(p_instance, p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class R, class... P>
void call_with_ptr_args_ret(T *p_instance, R (T::*p_method)(P...), const void **p_args, void *r_ret) {
	call_with_ptr_args_ret_helper<T, R, P...>(p_instance, p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class R, class... P>
void call_with_ptr_args_retc(T *p_instance, R (T::*p_method)(P...) const, const void **p_args, void *r_ret) {
	call_with_ptr_args_retc_helper<T, R, P...>(p_instance, p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class... P>
void call_with_ptr_args_static(T *p_instance, void (*p_method)(T *, P...), const void **p_args) {
	call_with_ptr_args_static_helper<T, P...>(p_instance, p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class R, class... P>
void call_with_ptr_args_static_retc(T *p_instance, R (*p_method)(T *, P...), const void **p_args, void *r_ret) {
	call_with_ptr_args_static_retc_helper<T, R, P...>(p_instance, p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <class R, class... P>
void call_with_ptr_args_static_method_ret(R (*p_method)(P...), const void **p_args, void *r_ret) {
	call_with_ptr_args_static_method_ret_helper<R, P...>(p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <class... P>
void call_with_ptr_args_static_method(void (*p_method)(P...), const void **p_args) {
	call_with_ptr_args_static_method_helper<P...>(p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

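// These public ptrcall entry points only build the index sequence (BuildIndexSequence<sizeof...(P)>)
// and forward to the matching helper; the argument count is fixed by the bound signature, so no
// runtime argument-count check is performed here.
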
template <class T, class... P>
void call_with_validated_variant_args(Variant *base, void (T::*p_method)(P...), const Variant **p_args) {
	call_with_validated_variant_args_helper<T, P...>(VariantGetInternalPtr<T>::get_ptr(base), p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class R, class... P>
void call_with_validated_variant_args_ret(Variant *base, R (T::*p_method)(P...), const Variant **p_args, Variant *r_ret) {
	call_with_validated_variant_args_ret_helper<T, R, P...>(VariantGetInternalPtr<T>::get_ptr(base), p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class R, class... P>
void call_with_validated_variant_args_retc(Variant *base, R (T::*p_method)(P...) const, const Variant **p_args, Variant *r_ret) {
	call_with_validated_variant_args_retc_helper<T, R, P...>(VariantGetInternalPtr<T>::get_ptr(base), p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class... P>
void call_with_validated_variant_args_static(Variant *base, void (*p_method)(T *, P...), const Variant **p_args) {
	call_with_validated_variant_args_static_helper<T, P...>(VariantGetInternalPtr<T>::get_ptr(base), p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class R, class... P>
void call_with_validated_variant_args_static_retc(Variant *base, R (*p_method)(T *, P...), const Variant **p_args, Variant *r_ret) {
	call_with_validated_variant_args_static_retc_helper<T, R, P...>(VariantGetInternalPtr<T>::get_ptr(base), p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

template <class... P>
void call_with_validated_variant_args_static_method(void (*p_method)(P...), const Variant **p_args) {
	call_with_validated_variant_args_static_method_helper<P...>(p_method, p_args, BuildIndexSequence<sizeof...(P)>{});
}

template <class R, class... P>
void call_with_validated_variant_args_static_method_ret(R (*p_method)(P...), const Variant **p_args, Variant *r_ret) {
	call_with_validated_variant_args_static_method_ret_helper<R, P...>(p_method, p_args, r_ret, BuildIndexSequence<sizeof...(P)>{});
}

// GCC raises "parameter 'p_args' set but not used" when P = {},
// it's not clever enough to treat other P values as making this branch valid.
#if defined(__GNUC__) && !defined(__clang__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-but-set-parameter"
#endif

template <class Q>
void call_get_argument_type_helper(int p_arg, int &index, Variant::Type &type) {
	if (p_arg == index) {
		type = GetTypeInfo<Q>::VARIANT_TYPE;
	}
	index++;
}

template <class... P>
Variant::Type call_get_argument_type(int p_arg) {
	Variant::Type type = Variant::NIL;
	int index = 0;
	// I think rocket science is simpler than modern C++.
	using expand_type = int[];
	expand_type a{ 0, (call_get_argument_type_helper<P>(p_arg, index, type), 0)... };
	(void)a; // Suppress (valid, but unavoidable) -Wunused-variable warning.
	(void)index; // Suppress GCC warning.
	return type;
}

template <class Q>
void call_get_argument_type_info_helper(int p_arg, int &index, PropertyInfo &info) {
	if (p_arg == index) {
		info = GetTypeInfo<Q>::get_class_info();
	}
	index++;
}

template <class... P>
void call_get_argument_type_info(int p_arg, PropertyInfo &info) {
	int index = 0;
	// I think rocket science is simpler than modern C++.
	using expand_type = int[];
	expand_type a{ 0, (call_get_argument_type_info_helper<P>(p_arg, index, info), 0)... };
	(void)a; // Suppress (valid, but unavoidable) -Wunused-variable warning.
	(void)index; // Suppress GCC warning.
}

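// The `expand_type a{ 0, (helper<P>(...), 0)... };` pattern used above (and in
// call_get_argument_metadata below) is the pre-fold-expression idiom for calling a helper once per
// type in the parameter pack, in order: each element of the braced initializer evaluates the helper
// for one P and discards the result of the comma expression.
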
#ifdef DEBUG_METHODS_ENABLED
template <class Q>
void call_get_argument_metadata_helper(int p_arg, int &index, GodotTypeInfo::Metadata &md) {
	if (p_arg == index) {
		md = GetTypeInfo<Q>::METADATA;
	}
	index++;
}

template <class... P>
GodotTypeInfo::Metadata call_get_argument_metadata(int p_arg) {
	GodotTypeInfo::Metadata md = GodotTypeInfo::METADATA_NONE;

	int index = 0;
	// I think rocket science is simpler than modern C++.
	using expand_type = int[];
	expand_type a{ 0, (call_get_argument_metadata_helper<P>(p_arg, index, md), 0)... };
	(void)a; // Suppress (valid, but unavoidable) -Wunused-variable warning.
	(void)index;
	return md;
}

#endif // DEBUG_METHODS_ENABLED

//////////////////////

template <class T, class R, class... P, size_t... Is>
void call_with_variant_args_ret_helper(T *p_instance, R (T::*p_method)(P...), const Variant **p_args, Variant &r_ret, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	r_ret = (p_instance->*p_method)(VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	r_ret = (p_instance->*p_method)(VariantCaster<P>::cast(*p_args[Is])...);
#endif
}

template <class R, class... P, size_t... Is>
void call_with_variant_args_static_ret(R (*p_method)(P...), const Variant **p_args, Variant &r_ret, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	r_ret = (p_method)(VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	r_ret = (p_method)(VariantCaster<P>::cast(*p_args[Is])...);
#endif
}

template <class... P, size_t... Is>
void call_with_variant_args_static(void (*p_method)(P...), const Variant **p_args, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	(p_method)(VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	(p_method)(VariantCaster<P>::cast(*p_args[Is])...);
#endif
}

template <class T, class R, class... P>
void call_with_variant_args_ret(T *p_instance, R (T::*p_method)(P...), const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error) {
#ifdef DEBUG_METHODS_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}

	if ((size_t)p_argcount < sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif
	call_with_variant_args_ret_helper<T, R, P...>(p_instance, p_method, p_args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class R, class... P, size_t... Is>
void call_with_variant_args_retc_helper(T *p_instance, R (T::*p_method)(P...) const, const Variant **p_args, Variant &r_ret, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	r_ret = (p_instance->*p_method)(VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	r_ret = (p_instance->*p_method)(VariantCaster<P>::cast(*p_args[Is])...);
#endif
	(void)p_args;
}

template <class R, class... P>
void call_with_variant_args_static_ret(R (*p_method)(P...), const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error) {
#ifdef DEBUG_METHODS_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}

	if ((size_t)p_argcount < sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif
	call_with_variant_args_static_ret<R, P...>(p_method, p_args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <class... P>
void call_with_variant_args_static_ret(void (*p_method)(P...), const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error) {
#ifdef DEBUG_METHODS_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}

	if ((size_t)p_argcount < sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif
	call_with_variant_args_static<P...>(p_method, p_args, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class R, class... P>
void call_with_variant_args_retc(T *p_instance, R (T::*p_method)(P...) const, const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error) {
#ifdef DEBUG_METHODS_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}

	if ((size_t)p_argcount < sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif
	call_with_variant_args_retc_helper<T, R, P...>(p_instance, p_method, p_args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class R, class... P, size_t... Is>
void call_with_variant_args_retc_static_helper(T *p_instance, R (*p_method)(T *, P...), const Variant **p_args, Variant &r_ret, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	r_ret = (p_method)(p_instance, VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	r_ret = (p_method)(p_instance, VariantCaster<P>::cast(*p_args[Is])...);
#endif

	(void)p_args;
}

template <class T, class R, class... P>
void call_with_variant_args_retc_static_helper_dv(T *p_instance, R (*p_method)(T *, P...), const Variant **p_args, int p_argcount, Variant &r_ret, const Vector<Variant> &default_values, Callable::CallError &r_error) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_retc_static_helper(p_instance, p_method, args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <class T, class... P, size_t... Is>
void call_with_variant_args_static_helper(T *p_instance, void (*p_method)(T *, P...), const Variant **p_args, Callable::CallError &r_error, IndexSequence<Is...>) {
	r_error.error = Callable::CallError::CALL_OK;

#ifdef DEBUG_METHODS_ENABLED
	(p_method)(p_instance, VariantCasterAndValidate<P>::cast(p_args, Is, r_error)...);
#else
	(p_method)(p_instance, VariantCaster<P>::cast(*p_args[Is])...);
#endif

	(void)p_args;
}

template <class T, class... P>
void call_with_variant_args_static_helper_dv(T *p_instance, void (*p_method)(T *, P...), const Variant **p_args, int p_argcount, const Vector<Variant> &default_values, Callable::CallError &r_error) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_static_helper(p_instance, p_method, args, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <class R, class... P>
void call_with_variant_args_static_ret_dv(R (*p_method)(P...), const Variant **p_args, int p_argcount, Variant &r_ret, Callable::CallError &r_error, const Vector<Variant> &default_values) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_static_ret(p_method, args, r_ret, r_error, BuildIndexSequence<sizeof...(P)>{});
}

template <class... P>
void call_with_variant_args_static_dv(void (*p_method)(P...), const Variant **p_args, int p_argcount, Callable::CallError &r_error, const Vector<Variant> &default_values) {
#ifdef DEBUG_ENABLED
	if ((size_t)p_argcount > sizeof...(P)) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	int32_t missing = (int32_t)sizeof...(P) - (int32_t)p_argcount;

	int32_t dvs = default_values.size();
#ifdef DEBUG_ENABLED
	if (missing > dvs) {
		r_error.error = Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
		r_error.argument = sizeof...(P);
		return;
	}
#endif

	const Variant *args[sizeof...(P) == 0 ? 1 : sizeof...(P)]; //avoid zero sized array
	for (int32_t i = 0; i < (int32_t)sizeof...(P); i++) {
		if (i < p_argcount) {
			args[i] = p_args[i];
		} else {
			args[i] = &default_values[i - p_argcount + (dvs - missing)];
		}
	}

	call_with_variant_args_static(p_method, args, r_error, BuildIndexSequence<sizeof...(P)>{});
}

#if defined(__GNUC__) && !defined(__clang__)
#pragma GCC diagnostic pop
#endif

#endif // BINDER_COMMON_H