Fix use of the uint suffix on hex number literals in shaders
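
Previously the shader lexer did not accept the 'u' (uint) suffix on hexadecimal literals, so a constant such as 0x1Fu was rejected even though the suffix already worked on decimal literals. The suffix lookup table now allows 'u' directly after hex digits, and the suffix is stripped from the literal (with the read cursor compensated) before the 32-bit range check.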

Yuri Roubinski 2023-07-01 14:26:05 +03:00
parent 46424488ed
commit 1994c25701
1 changed file with 8 additions and 1 deletion


@@ -659,7 +659,7 @@ ShaderLanguage::Token ShaderLanguage::_get_token() {
 			char t = char(i);
 			suffix_lut[CASE_ALL][i] = t == '.' || t == 'x' || t == 'e' || t == 'f' || t == 'u' || t == '-' || t == '+';
-			suffix_lut[CASE_HEXA_PERIOD][i] = t == 'e' || t == 'f';
+			suffix_lut[CASE_HEXA_PERIOD][i] = t == 'e' || t == 'f' || t == 'u';
 			suffix_lut[CASE_EXPONENT][i] = t == 'f' || t == '-' || t == '+';
 			suffix_lut[CASE_SIGN_AFTER_EXPONENT][i] = t == 'f';
 			suffix_lut[CASE_NONE][i] = false;
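
For context, suffix_lut is a per-state lookup table: for each numeric-lexing state (the CASE_* values) it precomputes which ASCII characters may legally follow the digits read so far, and adding 'u' to the CASE_HEXA_PERIOD row is what makes the uint suffix legal after hex digits. A minimal standalone sketch of the idea, assuming the CASE_* names and the 127-entry range from the surrounding code (this is not the Godot source itself):

#include <cassert>

// Illustrative reduction of the suffix table in shader_language.cpp; the
// enum layout and table size here are assumptions, not the engine's own.
enum { CASE_ALL, CASE_HEXA_PERIOD, CASE_EXPONENT, CASE_SIGN_AFTER_EXPONENT, CASE_NONE, CASE_MAX };
static bool suffix_lut[CASE_MAX][127];

static void build_suffix_lut() {
	for (int i = 0; i < 127; i++) {
		char t = char(i);
		suffix_lut[CASE_ALL][i] = t == '.' || t == 'x' || t == 'e' || t == 'f' || t == 'u' || t == '-' || t == '+';
		// The commit adds 'u' here, so a uint suffix is legal right after hex digits.
		suffix_lut[CASE_HEXA_PERIOD][i] = t == 'e' || t == 'f' || t == 'u';
		suffix_lut[CASE_EXPONENT][i] = t == 'f' || t == '-' || t == '+';
		suffix_lut[CASE_SIGN_AFTER_EXPONENT][i] = t == 'f';
		suffix_lut[CASE_NONE][i] = false;
	}
}

int main() {
	build_suffix_lut();
	assert(suffix_lut[CASE_HEXA_PERIOD]['u']);  // "0xffu": 'u' accepted after hex digits.
	assert(!suffix_lut[CASE_HEXA_PERIOD]['x']); // 'x' is not a valid suffix there.
}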
@@ -738,6 +738,13 @@ ShaderLanguage::Token ShaderLanguage::_get_token() {
 		char32_t last_char = str[str.length() - 1];
 		if (hexa_found) { // Integer (hex).
+			if (uint_suffix_found) {
+				// Strip the suffix.
+				str = str.left(str.length() - 1);
+				// Compensate reading cursor position.
+				char_idx += 1;
+			}
 			if (str.size() > 11 || !str.is_valid_hex_number(true)) { // > 0xFFFFFFFF
 				return _make_token(TK_ERROR, "Invalid (hexadecimal) numeric constant");
 			}
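
This second hunk strips the consumed 'u' before the width check, so only the digits are validated against 0xFFFFFFFF (the str.size() > 11 threshold corresponds to the 10-character "0xFFFFFFFF", since Godot's String::size() includes the terminating null). A minimal standalone sketch of the same strip-then-validate step, using std::string and a hypothetical parse_hex_literal helper rather than the Godot API:

#include <cstdint>
#include <cstdlib>
#include <string>

// Illustrative reduction of the added logic; this function and its signature
// are assumptions for the sketch, not part of ShaderLanguage.
static bool parse_hex_literal(std::string str, bool uint_suffix_found, int &char_idx, uint32_t &out) {
	if (uint_suffix_found) {
		str.pop_back(); // Strip the suffix ('u') before validating the digits.
		char_idx += 1;  // Compensate reading cursor position, as in the diff.
	}
	// "0x" plus at most 8 hex digits fits in 32 bits ("0xFFFFFFFF" is 10 chars).
	if (str.length() > 10 || str.rfind("0x", 0) != 0) {
		return false; // The lexer reports "Invalid (hexadecimal) numeric constant".
	}
	char *end = nullptr;
	unsigned long v = std::strtoul(str.c_str(), &end, 16); // Accepts the 0x prefix.
	if (end != str.c_str() + str.length()) {
		return false; // Trailing non-hex characters.
	}
	out = static_cast<uint32_t>(v);
	return true;
}

int main() {
	int cursor = 0;
	uint32_t value = 0;
	// "0x1Fu" lexes: the suffix is stripped, then the digits pass the range check.
	return parse_hex_literal("0x1Fu", true, cursor, value) && value == 0x1F ? 0 : 1;
}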