2021-01-27 08:29:44 -08:00
|
|
|
/*
|
2021-08-09 19:56:17 -07:00
|
|
|
* HLSL utility functions
|
|
|
|
*
|
2021-01-27 08:29:44 -08:00
|
|
|
* Copyright 2012 Matteo Bruni for CodeWeavers
|
|
|
|
* Copyright 2019-2020 Zebediah Figura for CodeWeavers
|
|
|
|
*
|
|
|
|
* This library is free software; you can redistribute it and/or
|
|
|
|
* modify it under the terms of the GNU Lesser General Public
|
|
|
|
* License as published by the Free Software Foundation; either
|
|
|
|
* version 2.1 of the License, or (at your option) any later version.
|
|
|
|
*
|
|
|
|
* This library is distributed in the hope that it will be useful,
|
|
|
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
|
|
|
* Lesser General Public License for more details.
|
|
|
|
*
|
|
|
|
* You should have received a copy of the GNU Lesser General Public
|
|
|
|
* License along with this library; if not, write to the Free Software
|
|
|
|
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
|
|
|
|
*/
|
|
|
|
|
|
|
|
#include "hlsl.h"
|
2021-01-30 11:51:32 -08:00
|
|
|
#include <stdio.h>
|
2021-01-27 08:29:44 -08:00
|
|
|
|
2021-12-01 08:14:57 -08:00
|
|
|
/* Emit an informational note attached to a source location, forwarding the
 * printf-style format and arguments to the context's message sink. */
void hlsl_note(struct hlsl_ctx *ctx, const struct vkd3d_shader_location *loc,
        enum vkd3d_shader_log_level level, const char *fmt, ...)
{
    va_list args;

    va_start(args, fmt);
    vkd3d_shader_vnote(ctx->message_context, loc, level, fmt, args);
    va_end(args);
}
|
|
|
|
|
2021-12-01 08:14:57 -08:00
|
|
|
/* Emit a compilation error at the given source location. Also latches the
 * context's result code so the compilation ultimately fails; only the first
 * failure code is kept. */
void hlsl_error(struct hlsl_ctx *ctx, const struct vkd3d_shader_location *loc,
        enum vkd3d_shader_error error, const char *fmt, ...)
{
    va_list args;

    va_start(args, fmt);
    vkd3d_shader_verror(ctx->message_context, loc, error, fmt, args);
    va_end(args);

    /* Preserve an earlier, more specific result if one was already set. */
    if (!ctx->result)
        ctx->result = VKD3D_ERROR_INVALID_SHADER;
}
|
|
|
|
|
2021-12-01 08:14:57 -08:00
|
|
|
/* Emit a compilation warning at the given source location. Unlike
 * hlsl_error(), this does not affect the context's result code. */
void hlsl_warning(struct hlsl_ctx *ctx, const struct vkd3d_shader_location *loc,
        enum vkd3d_shader_error error, const char *fmt, ...)
{
    va_list args;

    va_start(args, fmt);
    vkd3d_shader_vwarning(ctx->message_context, loc, error, fmt, args);
    va_end(args);
}
|
|
|
|
|
2021-12-01 08:14:57 -08:00
|
|
|
/* Report that compilation hit a not-yet-implemented feature. The message is
 * prefixed and reported as an error, and the context's result is latched to
 * VKD3D_ERROR_NOT_IMPLEMENTED (unless an earlier failure was recorded). */
void hlsl_fixme(struct hlsl_ctx *ctx, const struct vkd3d_shader_location *loc, const char *fmt, ...)
{
    struct vkd3d_string_buffer *string;
    va_list args;

    va_start(args, fmt);
    string = hlsl_get_string_buffer(ctx);
    vkd3d_string_buffer_printf(string, "Aborting due to not yet implemented feature: ");
    vkd3d_string_buffer_vprintf(string, fmt, args);
    vkd3d_shader_error(ctx->message_context, loc, VKD3D_SHADER_ERROR_HLSL_NOT_IMPLEMENTED, "%s", string->buffer);
    hlsl_release_string_buffer(ctx, string);
    va_end(args);

    /* Preserve an earlier, more specific result if one was already set. */
    if (!ctx->result)
        ctx->result = VKD3D_ERROR_NOT_IMPLEMENTED;
}
|
|
|
|
|
2023-08-07 14:45:31 -07:00
|
|
|
char *hlsl_sprintf_alloc(struct hlsl_ctx *ctx, const char *fmt, ...)
|
|
|
|
{
|
|
|
|
struct vkd3d_string_buffer *string;
|
|
|
|
va_list args;
|
|
|
|
char *ret;
|
|
|
|
|
|
|
|
if (!(string = hlsl_get_string_buffer(ctx)))
|
|
|
|
return NULL;
|
|
|
|
va_start(args, fmt);
|
|
|
|
if (vkd3d_string_buffer_vprintf(string, fmt, args) < 0)
|
|
|
|
{
|
|
|
|
va_end(args);
|
|
|
|
hlsl_release_string_buffer(ctx, string);
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
va_end(args);
|
|
|
|
ret = hlsl_strdup(ctx, string->buffer);
|
|
|
|
hlsl_release_string_buffer(ctx, string);
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
2021-02-04 14:33:53 -08:00
|
|
|
/* Add a variable declaration to the current scope.
 * Returns false (without adding) if the name is already declared in the
 * current scope, or, for a local variable declared directly inside a function
 * body, if it would redefine one of the function's parameters. */
bool hlsl_add_var(struct hlsl_ctx *ctx, struct hlsl_ir_var *decl, bool local_var)
{
    struct hlsl_scope *scope = ctx->cur_scope;
    struct hlsl_ir_var *var;

    LIST_FOR_EACH_ENTRY(var, &scope->vars, struct hlsl_ir_var, scope_entry)
    {
        if (!strcmp(decl->name, var->name))
            return false;
    }
    /* scope->upper is the parameter scope when the current scope is a
     * function body nested directly under the global scope. */
    if (local_var && scope->upper->upper == ctx->globals)
    {
        /* Check whether the variable redefines a function parameter. */
        LIST_FOR_EACH_ENTRY(var, &scope->upper->vars, struct hlsl_ir_var, scope_entry)
        {
            if (!strcmp(decl->name, var->name))
                return false;
        }
    }

    list_add_tail(&scope->vars, &decl->scope_entry);
    return true;
}
|
|
|
|
|
2021-02-02 14:11:14 -08:00
|
|
|
struct hlsl_ir_var *hlsl_get_var(struct hlsl_scope *scope, const char *name)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
struct hlsl_ir_var *var;
|
|
|
|
|
|
|
|
LIST_FOR_EACH_ENTRY(var, &scope->vars, struct hlsl_ir_var, scope_entry)
|
|
|
|
{
|
|
|
|
if (!strcmp(name, var->name))
|
|
|
|
return var;
|
|
|
|
}
|
|
|
|
if (!scope->upper)
|
|
|
|
return NULL;
|
2021-02-02 14:11:14 -08:00
|
|
|
return hlsl_get_var(scope->upper, name);
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
|
2021-02-02 14:11:14 -08:00
|
|
|
/* Release all memory owned by a variable declaration: its name, semantic,
 * per-regset object-usage arrays, and the declaration itself. */
void hlsl_free_var(struct hlsl_ir_var *decl)
{
    unsigned int k;

    vkd3d_free((void *)decl->name);
    hlsl_cleanup_semantic(&decl->semantic);
    /* One usage array per object register set (textures, samplers, UAVs). */
    for (k = 0; k <= HLSL_REGSET_LAST_OBJECT; ++k)
        vkd3d_free((void *)decl->objects_usage[k]);
    vkd3d_free(decl);
}
|
|
|
|
|
2022-06-30 10:16:37 -07:00
|
|
|
bool hlsl_type_is_row_major(const struct hlsl_type *type)
|
2021-06-23 21:57:32 -07:00
|
|
|
{
|
|
|
|
/* Default to column-major if the majority isn't explicitly set, which can
|
|
|
|
* happen for anonymous nodes. */
|
|
|
|
return !!(type->modifiers & HLSL_MODIFIER_ROW_MAJOR);
|
|
|
|
}
|
|
|
|
|
2022-06-30 13:25:12 -07:00
|
|
|
unsigned int hlsl_type_minor_size(const struct hlsl_type *type)
|
|
|
|
{
|
2022-11-11 17:31:55 -08:00
|
|
|
if (type->class != HLSL_CLASS_MATRIX || hlsl_type_is_row_major(type))
|
2022-06-30 13:25:12 -07:00
|
|
|
return type->dimx;
|
|
|
|
else
|
|
|
|
return type->dimy;
|
|
|
|
}
|
|
|
|
|
|
|
|
unsigned int hlsl_type_major_size(const struct hlsl_type *type)
|
|
|
|
{
|
2022-11-11 17:31:55 -08:00
|
|
|
if (type->class != HLSL_CLASS_MATRIX || hlsl_type_is_row_major(type))
|
2022-06-30 13:25:12 -07:00
|
|
|
return type->dimy;
|
|
|
|
else
|
|
|
|
return type->dimx;
|
|
|
|
}
|
|
|
|
|
2022-07-20 14:42:13 -07:00
|
|
|
/* Return the number of immediate sub-elements of a type: components of a
 * vector, rows/columns (major size) of a matrix, elements of an array, or
 * fields of a struct. Scalars and objects have no sub-elements (0). */
unsigned int hlsl_type_element_count(const struct hlsl_type *type)
{
    switch (type->class)
    {
        case HLSL_CLASS_VECTOR:
            return type->dimx;
        case HLSL_CLASS_MATRIX:
            return hlsl_type_major_size(type);
        case HLSL_CLASS_ARRAY:
            return type->e.array.elements_count;
        case HLSL_CLASS_STRUCT:
            return type->e.record.field_count;
        default:
            return 0;
    }
}
|
|
|
|
|
2023-04-24 17:41:15 -07:00
|
|
|
const struct hlsl_type *hlsl_get_multiarray_element_type(const struct hlsl_type *type)
|
2021-06-23 21:57:33 -07:00
|
|
|
{
|
2022-11-11 17:31:55 -08:00
|
|
|
if (type->class == HLSL_CLASS_ARRAY)
|
2023-04-24 17:41:15 -07:00
|
|
|
return hlsl_get_multiarray_element_type(type->e.array.type);
|
|
|
|
return type;
|
|
|
|
}
|
|
|
|
|
|
|
|
unsigned int hlsl_get_multiarray_size(const struct hlsl_type *type)
|
|
|
|
{
|
|
|
|
if (type->class == HLSL_CLASS_ARRAY)
|
|
|
|
return hlsl_get_multiarray_size(type->e.array.type) * type->e.array.elements_count;
|
2021-06-23 21:57:33 -07:00
|
|
|
return 1;
|
|
|
|
}
|
|
|
|
|
2022-10-28 08:23:05 -07:00
|
|
|
bool hlsl_type_is_resource(const struct hlsl_type *type)
|
|
|
|
{
|
2023-04-25 09:41:38 -07:00
|
|
|
if (type->class == HLSL_CLASS_ARRAY)
|
|
|
|
return hlsl_type_is_resource(type->e.array.type);
|
|
|
|
|
2022-11-11 17:31:55 -08:00
|
|
|
if (type->class == HLSL_CLASS_OBJECT)
|
2022-10-28 08:23:05 -07:00
|
|
|
{
|
|
|
|
switch (type->base_type)
|
|
|
|
{
|
|
|
|
case HLSL_TYPE_TEXTURE:
|
|
|
|
case HLSL_TYPE_SAMPLER:
|
|
|
|
case HLSL_TYPE_UAV:
|
|
|
|
return true;
|
|
|
|
default:
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
|
2023-06-12 08:58:07 -07:00
|
|
|
/* Only intended to be used for derefs (after copies have been lowered to components or vectors) or
 * resources, since for both their data types span across a single regset.
 * Maps a type to the single register set it occupies: numeric classes go to
 * HLSL_REGSET_NUMERIC, arrays are resolved through their element type, and
 * resource objects map to their respective object register set. Any other
 * type is a caller error (unreachable). */
static enum hlsl_regset type_get_regset(const struct hlsl_type *type)
{
    if (type->class <= HLSL_CLASS_LAST_NUMERIC)
        return HLSL_REGSET_NUMERIC;

    if (type->class == HLSL_CLASS_ARRAY)
        return type_get_regset(type->e.array.type);

    if (type->class == HLSL_CLASS_OBJECT)
    {
        switch (type->base_type)
        {
            case HLSL_TYPE_TEXTURE:
                return HLSL_REGSET_TEXTURES;

            case HLSL_TYPE_SAMPLER:
                return HLSL_REGSET_SAMPLERS;

            case HLSL_TYPE_UAV:
                return HLSL_REGSET_UAVS;

            default:
                vkd3d_unreachable();
        }
    }

    vkd3d_unreachable();
}
|
|
|
|
|
2023-06-12 08:58:07 -07:00
|
|
|
/* Return the register set occupied by the type a deref resolves to. */
enum hlsl_regset hlsl_deref_get_regset(struct hlsl_ctx *ctx, const struct hlsl_deref *deref)
{
    struct hlsl_type *type = hlsl_deref_get_type(ctx, deref);

    return type_get_regset(type);
}
|
|
|
|
|
2021-06-23 21:57:35 -07:00
|
|
|
/* Given a running offset (in numeric register components), return the offset
 * at which a value of the given type may be placed under SM4 constant-buffer
 * packing rules. */
unsigned int hlsl_type_get_sm4_offset(const struct hlsl_type *type, unsigned int offset)
{
    /* Align to the next vec4 boundary if:
     *  (a) the type is a struct or array type, or
     *  (b) the type would cross a vec4 boundary; i.e. a vec3 and a
     *      vec1 can be packed together, but not a vec3 and a vec2.
     */
    if (type->class == HLSL_CLASS_STRUCT || type->class == HLSL_CLASS_ARRAY
            || (offset & 3) + type->reg_size[HLSL_REGSET_NUMERIC] > 4)
        return align(offset, 4);
    return offset;
}
|
|
|
|
|
2021-06-23 21:57:34 -07:00
|
|
|
/* Compute type->reg_size[] for every register set, i.e. how many registers or
 * register components a value of this type occupies. Numeric sizes are
 * counted in components; SM1 (pre-4) rounds every numeric value up to a full
 * vec4, while SM4 packs more tightly. Struct layout also assigns each field's
 * reg_offset[] and recomputes the struct's total component count (dimx). */
static void hlsl_type_calculate_reg_size(struct hlsl_ctx *ctx, struct hlsl_type *type)
{
    bool is_sm4 = (ctx->profile->major_version >= 4);
    unsigned int k;

    for (k = 0; k <= HLSL_REGSET_LAST; ++k)
        type->reg_size[k] = 0;

    switch (type->class)
    {
        case HLSL_CLASS_SCALAR:
        case HLSL_CLASS_VECTOR:
            /* SM1 allocates a full 4-component register per scalar/vector. */
            type->reg_size[HLSL_REGSET_NUMERIC] = is_sm4 ? type->dimx : 4;
            break;

        case HLSL_CLASS_MATRIX:
            /* Each major row/column occupies a register; on SM4 the last one
             * only counts its used components. */
            if (hlsl_type_is_row_major(type))
                type->reg_size[HLSL_REGSET_NUMERIC] = is_sm4 ? (4 * (type->dimy - 1) + type->dimx) : (4 * type->dimy);
            else
                type->reg_size[HLSL_REGSET_NUMERIC] = is_sm4 ? (4 * (type->dimx - 1) + type->dimy) : (4 * type->dimx);
            break;

        case HLSL_CLASS_ARRAY:
        {
            /* Implicit-size arrays have no size yet; leave all zeros. */
            if (type->e.array.elements_count == HLSL_ARRAY_ELEMENTS_COUNT_IMPLICIT)
                break;

            for (k = 0; k <= HLSL_REGSET_LAST; ++k)
            {
                unsigned int element_size = type->e.array.type->reg_size[k];

                /* SM4 aligns each element to a vec4 boundary, but the last
                 * element is not padded out. */
                if (is_sm4 && k == HLSL_REGSET_NUMERIC)
                    type->reg_size[k] = (type->e.array.elements_count - 1) * align(element_size, 4) + element_size;
                else
                    type->reg_size[k] = type->e.array.elements_count * element_size;
            }

            break;
        }

        case HLSL_CLASS_STRUCT:
        {
            unsigned int i;

            type->dimx = 0;
            for (i = 0; i < type->e.record.field_count; ++i)
            {
                struct hlsl_struct_field *field = &type->e.record.fields[i];

                for (k = 0; k <= HLSL_REGSET_LAST; ++k)
                {
                    /* Numeric fields obey SM4 packing rules for their offset. */
                    if (k == HLSL_REGSET_NUMERIC)
                        type->reg_size[k] = hlsl_type_get_sm4_offset(field->type, type->reg_size[k]);
                    field->reg_offset[k] = type->reg_size[k];
                    type->reg_size[k] += field->type->reg_size[k];
                }

                /* dimx accumulates the struct's total component count. */
                type->dimx += field->type->dimx * field->type->dimy * hlsl_get_multiarray_size(field->type);
            }
            break;
        }

        case HLSL_CLASS_OBJECT:
        {
            /* A resource object occupies exactly one register in its set. */
            if (hlsl_type_is_resource(type))
            {
                enum hlsl_regset regset = type_get_regset(type);

                type->reg_size[regset] = 1;
            }
            break;
        }
    }
}
|
|
|
|
|
2022-10-28 08:23:05 -07:00
|
|
|
/* Returns the size of a type, considered as part of an array of that type, within a specific
|
|
|
|
* register set. As such it includes padding after the type, when applicable. */
|
|
|
|
unsigned int hlsl_type_get_array_element_reg_size(const struct hlsl_type *type, enum hlsl_regset regset)
|
2022-03-17 13:58:35 -07:00
|
|
|
{
|
2022-10-28 08:23:05 -07:00
|
|
|
if (regset == HLSL_REGSET_NUMERIC)
|
|
|
|
return align(type->reg_size[regset], 4);
|
|
|
|
return type->reg_size[regset];
|
2022-03-17 13:58:35 -07:00
|
|
|
}
|
|
|
|
|
2021-09-27 18:51:46 -07:00
|
|
|
/* Allocate a new named numeric type, compute its register sizes, and register
 * it in the context's type list (which owns it). Returns NULL on allocation
 * failure. */
static struct hlsl_type *hlsl_new_type(struct hlsl_ctx *ctx, const char *name, enum hlsl_type_class type_class,
        enum hlsl_base_type base_type, unsigned dimx, unsigned dimy)
{
    struct hlsl_type *type;

    if (!(type = hlsl_alloc(ctx, sizeof(*type))))
        return NULL;
    if (!(type->name = hlsl_strdup(ctx, name)))
    {
        vkd3d_free(type);
        return NULL;
    }
    type->class = type_class;
    type->base_type = base_type;
    type->dimx = dimx;
    type->dimy = dimy;
    hlsl_type_calculate_reg_size(ctx, type);

    /* The context's type list owns the type and frees it on cleanup. */
    list_add_tail(&ctx->types, &type->entry);

    return type;
}
|
|
|
|
|
2022-06-30 15:20:20 -07:00
|
|
|
static bool type_is_single_component(const struct hlsl_type *type)
|
|
|
|
{
|
2022-11-11 17:31:55 -08:00
|
|
|
return type->class == HLSL_CLASS_SCALAR || type->class == HLSL_CLASS_OBJECT;
|
2022-06-30 15:20:20 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Given a type and a component index, this function moves one step through the path required to
 * reach that component within the type.
 * It returns the first index of this path.
 * It sets *type_ptr to the (outermost) type within the original type that contains the component.
 * It sets *index_ptr to the index of the component within *type_ptr.
 * So, this function can be called several times in sequence to obtain all the path's indexes until
 * the component is finally reached. */
static unsigned int traverse_path_from_component_index(struct hlsl_ctx *ctx,
        struct hlsl_type **type_ptr, unsigned int *index_ptr)
{
    struct hlsl_type *type = *type_ptr;
    unsigned int index = *index_ptr;

    assert(!type_is_single_component(type));
    assert(index < hlsl_type_component_count(type));

    switch (type->class)
    {
        case HLSL_CLASS_VECTOR:
            /* A vector component is a scalar; the path index is the component
             * index itself, and there is nothing left to traverse. */
            assert(index < type->dimx);
            *type_ptr = hlsl_get_scalar_type(ctx, type->base_type);
            *index_ptr = 0;
            return index;

        case HLSL_CLASS_MATRIX:
        {
            /* Components are numbered row by row; which of (row, column) is
             * the path index depends on the matrix's majority. */
            unsigned int y = index / type->dimx, x = index % type->dimx;
            bool row_major = hlsl_type_is_row_major(type);

            assert(index < type->dimx * type->dimy);
            *type_ptr = hlsl_get_vector_type(ctx, type->base_type, row_major ? type->dimx : type->dimy);
            *index_ptr = row_major ? x : y;
            return row_major ? y : x;
        }

        case HLSL_CLASS_ARRAY:
        {
            /* The path index is the array element; the remainder indexes into
             * that element. */
            unsigned int elem_comp_count = hlsl_type_component_count(type->e.array.type);
            unsigned int array_index;

            *type_ptr = type->e.array.type;
            *index_ptr = index % elem_comp_count;
            array_index = index / elem_comp_count;
            assert(array_index < type->e.array.elements_count);
            return array_index;
        }

        case HLSL_CLASS_STRUCT:
        {
            /* Walk the fields, subtracting each field's component count until
             * the index falls within a field; that field is the path index. */
            struct hlsl_struct_field *field;
            unsigned int field_comp_count, i;

            for (i = 0; i < type->e.record.field_count; ++i)
            {
                field = &type->e.record.fields[i];
                field_comp_count = hlsl_type_component_count(field->type);
                if (index < field_comp_count)
                {
                    *type_ptr = field->type;
                    *index_ptr = index;
                    return i;
                }
                index -= field_comp_count;
            }
            vkd3d_unreachable();
        }

        default:
            vkd3d_unreachable();
    }
}
|
|
|
|
|
|
|
|
/* Resolve the type of the component at the given flat component index within
 * a (possibly nested) type, by repeatedly stepping into sub-types until a
 * single component (scalar or object) remains. */
struct hlsl_type *hlsl_type_get_component_type(struct hlsl_ctx *ctx, struct hlsl_type *type,
        unsigned int index)
{
    for (;;)
    {
        if (type_is_single_component(type))
            return type;
        traverse_path_from_component_index(ctx, &type, &index);
    }
}
|
|
|
|
|
2023-05-29 16:51:10 -07:00
|
|
|
/* Compute the register offset of the component at the given flat component
 * index within a type, accumulating per-register-set offsets while walking
 * the component path. On return, *regset is set to the register set the
 * component lives in, and the offset within that set is returned. */
unsigned int hlsl_type_get_component_offset(struct hlsl_ctx *ctx, struct hlsl_type *type,
        unsigned int index, enum hlsl_regset *regset)
{
    unsigned int offset[HLSL_REGSET_LAST + 1] = {0};
    struct hlsl_type *next_type;
    unsigned int idx, r;

    while (!type_is_single_component(type))
    {
        next_type = type;
        idx = traverse_path_from_component_index(ctx, &next_type, &index);

        switch (type->class)
        {
            case HLSL_CLASS_SCALAR:
            case HLSL_CLASS_VECTOR:
            case HLSL_CLASS_MATRIX:
                /* Numeric path steps advance component-wise. */
                offset[HLSL_REGSET_NUMERIC] += idx;
                break;

            case HLSL_CLASS_STRUCT:
                /* Fields carry precomputed offsets for every register set. */
                for (r = 0; r <= HLSL_REGSET_LAST; ++r)
                    offset[r] += type->e.record.fields[idx].reg_offset[r];
                break;

            case HLSL_CLASS_ARRAY:
                /* Numeric array elements are aligned to vec4 boundaries. */
                for (r = 0; r <= HLSL_REGSET_LAST; ++r)
                {
                    if (r == HLSL_REGSET_NUMERIC)
                        offset[r] += idx * align(type->e.array.type->reg_size[r], 4);
                    else
                        offset[r] += idx * type->e.array.type->reg_size[r];
                }
                break;

            case HLSL_CLASS_OBJECT:
                assert(idx == 0);
                break;

            default:
                vkd3d_unreachable();
        }
        type = next_type;
    }

    *regset = type_get_regset(type);
    return offset[*regset];
}
|
|
|
|
|
2022-06-30 15:20:20 -07:00
|
|
|
/* Initialize a deref pointing at a variable, with room for a path of
 * path_len indices (zero-initialized). The lowered-offset fields are cleared;
 * the deref starts in non-lowered (path) form. Returns false on allocation
 * failure, leaving the deref cleared. */
static bool init_deref(struct hlsl_ctx *ctx, struct hlsl_deref *deref, struct hlsl_ir_var *var,
        unsigned int path_len)
{
    deref->var = var;
    deref->path_len = path_len;
    deref->rel_offset.node = NULL;
    deref->const_offset = 0;
    deref->data_type = NULL;

    if (path_len == 0)
    {
        deref->path = NULL;
        return true;
    }

    if (!(deref->path = hlsl_calloc(ctx, deref->path_len, sizeof(*deref->path))))
    {
        /* Leave the deref in a consistent empty state on failure. */
        deref->var = NULL;
        deref->path_len = 0;
        return false;
    }

    return true;
}
|
|
|
|
|
2023-03-10 17:09:58 -08:00
|
|
|
/* Initialize a deref from a chain of HLSL_IR_INDEX nodes terminating in an
 * HLSL_IR_LOAD. The resulting deref's path is the load's path followed by the
 * chain's indices (outermost index last in the chain, so the chain is walked
 * twice: once to measure, once to fill the path back-to-front). Emits an
 * "invalid l-value" error and returns false if the chain does not end in a
 * load; returns false on allocation failure. */
bool hlsl_init_deref_from_index_chain(struct hlsl_ctx *ctx, struct hlsl_deref *deref, struct hlsl_ir_node *chain)
{
    struct hlsl_ir_index *index;
    struct hlsl_ir_load *load;
    unsigned int chain_len, i;
    struct hlsl_ir_node *ptr;

    deref->path = NULL;
    deref->path_len = 0;
    deref->rel_offset.node = NULL;
    deref->const_offset = 0;

    assert(chain);
    if (chain->type == HLSL_IR_INDEX)
        assert(!hlsl_index_is_noncontiguous(hlsl_ir_index(chain)));

    /* Find the length of the index chain */
    chain_len = 0;
    ptr = chain;
    while (ptr->type == HLSL_IR_INDEX)
    {
        index = hlsl_ir_index(ptr);

        chain_len++;
        ptr = index->val.node;
    }

    if (ptr->type != HLSL_IR_LOAD)
    {
        hlsl_error(ctx, &chain->loc, VKD3D_SHADER_ERROR_HLSL_INVALID_LVALUE, "Invalid l-value.");
        return false;
    }
    load = hlsl_ir_load(ptr);

    if (!init_deref(ctx, deref, load->src.var, load->src.path_len + chain_len))
        return false;

    /* Copy the load's existing path as the prefix of the new path. */
    for (i = 0; i < load->src.path_len; ++i)
        hlsl_src_from_node(&deref->path[i], load->src.path[i].node);

    /* Walk the chain again, filling path entries from the end backwards,
     * since the outermost index is at the head of the chain. */
    chain_len = 0;
    ptr = chain;
    while (ptr->type == HLSL_IR_INDEX)
    {
        unsigned int p = deref->path_len - 1 - chain_len;

        index = hlsl_ir_index(ptr);
        if (hlsl_index_is_noncontiguous(index))
        {
            /* Noncontiguous (column-major matrix) indexing swaps the order of
             * the two innermost path entries. */
            hlsl_src_from_node(&deref->path[p], deref->path[p + 1].node);
            hlsl_src_remove(&deref->path[p + 1]);
            hlsl_src_from_node(&deref->path[p + 1], index->idx.node);
        }
        else
        {
            hlsl_src_from_node(&deref->path[p], index->idx.node);
        }

        chain_len++;
        ptr = index->val.node;
    }
    assert(deref->path_len == load->src.path_len + chain_len);

    return true;
}
|
|
|
|
|
2021-08-15 10:08:32 -07:00
|
|
|
/* Return the type a deref resolves to. For a lowered deref the stored data
 * type is returned directly; otherwise the type is derived by applying each
 * path index to the variable's declared type in turn. */
struct hlsl_type *hlsl_deref_get_type(struct hlsl_ctx *ctx, const struct hlsl_deref *deref)
{
    struct hlsl_type *type;
    unsigned int i;

    assert(deref);

    if (hlsl_deref_is_lowered(deref))
        return deref->data_type;

    type = deref->var->data_type;
    for (i = 0; i < deref->path_len; ++i)
        type = hlsl_get_element_type_from_path_index(ctx, type, deref->path[i].node);
    return type;
}
|
|
|
|
|
|
|
|
/* Initializes a deref from another deref (prefix) and a component index.
 * *block is initialized to contain the new constant node instructions used by the deref's path.
 * The component path is computed twice: a first pass measures its length so
 * the deref can be allocated, and a second pass creates the constant index
 * nodes and fills the path. Returns false on allocation failure. */
static bool init_deref_from_component_index(struct hlsl_ctx *ctx, struct hlsl_block *block,
        struct hlsl_deref *deref, const struct hlsl_deref *prefix, unsigned int index,
        const struct vkd3d_shader_location *loc)
{
    unsigned int path_len, path_index, deref_path_len, i;
    struct hlsl_type *path_type;
    struct hlsl_ir_node *c;

    hlsl_block_init(block);

    /* First pass: count the path steps needed to reach the component. */
    path_len = 0;
    path_type = hlsl_deref_get_type(ctx, prefix);
    path_index = index;
    while (!type_is_single_component(path_type))
    {
        traverse_path_from_component_index(ctx, &path_type, &path_index);
        ++path_len;
    }

    if (!init_deref(ctx, deref, prefix->var, prefix->path_len + path_len))
        return false;

    /* Copy the prefix deref's path entries. */
    deref_path_len = 0;
    for (i = 0; i < prefix->path_len; ++i)
        hlsl_src_from_node(&deref->path[deref_path_len++], prefix->path[i].node);

    /* Second pass: materialize each path step as a uint constant node. */
    path_type = hlsl_deref_get_type(ctx, prefix);
    path_index = index;
    while (!type_is_single_component(path_type))
    {
        unsigned int next_index = traverse_path_from_component_index(ctx, &path_type, &path_index);

        if (!(c = hlsl_new_uint_constant(ctx, next_index, loc)))
        {
            hlsl_block_cleanup(block);
            return false;
        }
        hlsl_block_add_instr(block, c);

        hlsl_src_from_node(&deref->path[deref_path_len++], c);
    }

    assert(deref_path_len == deref->path_len);

    return true;
}
|
|
|
|
|
2022-08-16 09:33:51 -07:00
|
|
|
/* Return the type obtained by applying one deref path index to a type:
 * a vector yields its scalar type, a matrix yields a row or column vector
 * (per majority), an array yields its element type, and a struct yields the
 * field selected by the index (which must be a constant). */
struct hlsl_type *hlsl_get_element_type_from_path_index(struct hlsl_ctx *ctx, const struct hlsl_type *type,
        struct hlsl_ir_node *idx)
{
    assert(idx);

    switch (type->class)
    {
        case HLSL_CLASS_VECTOR:
            return hlsl_get_scalar_type(ctx, type->base_type);

        case HLSL_CLASS_MATRIX:
            if (hlsl_type_is_row_major(type))
                return hlsl_get_vector_type(ctx, type->base_type, type->dimx);
            else
                return hlsl_get_vector_type(ctx, type->base_type, type->dimy);

        case HLSL_CLASS_ARRAY:
            return type->e.array.type;

        case HLSL_CLASS_STRUCT:
        {
            /* Struct field indices are always constants in deref paths. */
            struct hlsl_ir_constant *c = hlsl_ir_constant(idx);

            assert(c->value.u[0].u < type->e.record.field_count);
            return type->e.record.fields[c->value.u[0].u].type;
        }

        default:
            vkd3d_unreachable();
    }
}
|
|
|
|
|
2021-02-04 14:33:53 -08:00
|
|
|
/* Allocate a new array type wrapping basic_type with the given element count,
 * inheriting the element type's modifiers, dimensions, and sampler dimension.
 * The type is registered in the context's type list, which owns it.
 * Returns NULL on allocation failure. */
struct hlsl_type *hlsl_new_array_type(struct hlsl_ctx *ctx, struct hlsl_type *basic_type, unsigned int array_size)
{
    struct hlsl_type *type;

    if (!(type = hlsl_alloc(ctx, sizeof(*type))))
        return NULL;

    type->class = HLSL_CLASS_ARRAY;
    type->modifiers = basic_type->modifiers;
    type->e.array.elements_count = array_size;
    type->e.array.type = basic_type;
    type->dimx = basic_type->dimx;
    type->dimy = basic_type->dimy;
    type->sampler_dim = basic_type->sampler_dim;
    hlsl_type_calculate_reg_size(ctx, type);

    list_add_tail(&ctx->types, &type->entry);

    return type;
}
|
|
|
|
|
2022-07-14 18:23:43 -07:00
|
|
|
/* Create a record (struct) type with the given name and field array. */
struct hlsl_type *hlsl_new_struct_type(struct hlsl_ctx *ctx, const char *name,
        struct hlsl_struct_field *fields, size_t field_count)
{
    struct hlsl_type *record;

    if (!(record = hlsl_alloc(ctx, sizeof(*record))))
        return NULL;

    record->class = HLSL_CLASS_STRUCT;
    record->base_type = HLSL_TYPE_VOID;
    record->name = name;
    record->dimy = 1;
    record->e.record.field_count = field_count;
    record->e.record.fields = fields;
    hlsl_type_calculate_reg_size(ctx, record);

    list_add_tail(&ctx->types, &record->entry);

    return record;
}
|
|
|
|
|
2023-03-04 10:46:12 -08:00
|
|
|
struct hlsl_type *hlsl_new_texture_type(struct hlsl_ctx *ctx, enum hlsl_sampler_dim dim,
|
|
|
|
struct hlsl_type *format, unsigned int sample_count)
|
2021-10-07 19:58:53 -07:00
|
|
|
{
|
|
|
|
struct hlsl_type *type;
|
|
|
|
|
2022-01-21 13:22:25 -08:00
|
|
|
if (!(type = hlsl_alloc(ctx, sizeof(*type))))
|
2021-10-07 19:58:53 -07:00
|
|
|
return NULL;
|
2022-11-11 17:31:55 -08:00
|
|
|
type->class = HLSL_CLASS_OBJECT;
|
2021-10-07 19:58:53 -07:00
|
|
|
type->base_type = HLSL_TYPE_TEXTURE;
|
|
|
|
type->dimx = 4;
|
|
|
|
type->dimy = 1;
|
|
|
|
type->sampler_dim = dim;
|
2021-10-07 19:58:55 -07:00
|
|
|
type->e.resource_format = format;
|
2023-03-04 10:46:12 -08:00
|
|
|
type->sample_count = sample_count;
|
2022-01-21 13:22:26 -08:00
|
|
|
hlsl_type_calculate_reg_size(ctx, type);
|
2021-10-07 19:58:53 -07:00
|
|
|
list_add_tail(&ctx->types, &type->entry);
|
|
|
|
return type;
|
|
|
|
}
|
|
|
|
|
2021-08-12 19:03:26 -07:00
|
|
|
struct hlsl_type *hlsl_new_uav_type(struct hlsl_ctx *ctx, enum hlsl_sampler_dim dim, struct hlsl_type *format)
|
|
|
|
{
|
|
|
|
struct hlsl_type *type;
|
|
|
|
|
2023-08-01 13:36:24 -07:00
|
|
|
if (!(type = hlsl_alloc(ctx, sizeof(*type))))
|
2021-08-12 19:03:26 -07:00
|
|
|
return NULL;
|
2022-11-11 17:31:55 -08:00
|
|
|
type->class = HLSL_CLASS_OBJECT;
|
2021-08-12 19:03:26 -07:00
|
|
|
type->base_type = HLSL_TYPE_UAV;
|
|
|
|
type->dimx = format->dimx;
|
|
|
|
type->dimy = 1;
|
|
|
|
type->sampler_dim = dim;
|
|
|
|
type->e.resource_format = format;
|
|
|
|
hlsl_type_calculate_reg_size(ctx, type);
|
|
|
|
list_add_tail(&ctx->types, &type->entry);
|
|
|
|
return type;
|
|
|
|
}
|
|
|
|
|
2023-02-18 16:44:20 -08:00
|
|
|
static const char * get_case_insensitive_typename(const char *name)
|
|
|
|
{
|
|
|
|
static const char *const names[] =
|
|
|
|
{
|
|
|
|
"dword",
|
2023-02-21 14:59:04 -08:00
|
|
|
"float",
|
2023-02-24 02:39:45 -08:00
|
|
|
"matrix",
|
2023-02-26 06:25:06 -08:00
|
|
|
"vector",
|
2023-02-18 16:44:20 -08:00
|
|
|
};
|
|
|
|
unsigned int i;
|
|
|
|
|
|
|
|
for (i = 0; i < ARRAY_SIZE(names); ++i)
|
|
|
|
{
|
|
|
|
if (!ascii_strcasecmp(names[i], name))
|
|
|
|
return names[i];
|
|
|
|
}
|
|
|
|
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Look up a type by name in "scope". If "recursive", walk up through parent
 * scopes; if "case_insensitive", also try the canonical spelling of the few
 * case-insensitive builtin type names, but only at the outermost scope. */
struct hlsl_type *hlsl_get_type(struct hlsl_scope *scope, const char *name, bool recursive, bool case_insensitive)
{
    struct rb_entry *entry = rb_get(&scope->types, name);

    if (entry)
        return RB_ENTRY_VALUE(entry, struct hlsl_type, scope_entry);

    if (scope->upper)
    {
        if (recursive)
            return hlsl_get_type(scope->upper, name, recursive, case_insensitive);
    }
    else
    {
        /* Outermost scope: retry with the canonicalized name, if any.
         * Note that "name" is rebound to the canonical spelling here. */
        if (case_insensitive && (name = get_case_insensitive_typename(name)))
        {
            if ((entry = rb_get(&scope->types, name)))
                return RB_ENTRY_VALUE(entry, struct hlsl_type, scope_entry);
        }
    }

    return NULL;
}
|
|
|
|
|
2021-09-11 16:18:50 -07:00
|
|
|
struct hlsl_ir_function *hlsl_get_function(struct hlsl_ctx *ctx, const char *name)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2021-09-11 16:18:50 -07:00
|
|
|
struct rb_entry *entry;
|
|
|
|
|
|
|
|
if ((entry = rb_get(&ctx->functions, name)))
|
|
|
|
return RB_ENTRY_VALUE(entry, struct hlsl_ir_function, entry);
|
|
|
|
return NULL;
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
|
2023-09-08 14:21:47 -07:00
|
|
|
/* Return the first overload of the function named "name", or NULL if no
 * function with that name exists. Overloads are kept in a plain list on the
 * function, so "first" is simply the list head. */
struct hlsl_ir_function_decl *hlsl_get_first_func_decl(struct hlsl_ctx *ctx, const char *name)
{
    struct hlsl_ir_function *func;
    struct rb_entry *entry;

    if ((entry = rb_get(&ctx->functions, name)))
    {
        func = RB_ENTRY_VALUE(entry, struct hlsl_ir_function, entry);

        /* NOTE(review): assumes the overload list is non-empty for any
         * registered function — list_head() would return NULL otherwise. */
        return LIST_ENTRY(list_head(&func->overloads), struct hlsl_ir_function_decl, entry);
    }

    return NULL;
}
|
|
|
|
|
2022-06-30 15:20:20 -07:00
|
|
|
/* Return the total number of scalar components in "type", recursing through
 * arrays and struct fields. Object types count as a single component. */
unsigned int hlsl_type_component_count(const struct hlsl_type *type)
{
    switch (type->class)
    {
        case HLSL_CLASS_SCALAR:
        case HLSL_CLASS_VECTOR:
        case HLSL_CLASS_MATRIX:
            /* dimy is 1 for scalars and vectors, so this covers all three. */
            return type->dimx * type->dimy;

        case HLSL_CLASS_STRUCT:
        {
            unsigned int count = 0, i;

            for (i = 0; i < type->e.record.field_count; ++i)
                count += hlsl_type_component_count(type->e.record.fields[i].type);
            return count;
        }

        case HLSL_CLASS_ARRAY:
            return hlsl_type_component_count(type->e.array.type) * type->e.array.elements_count;

        case HLSL_CLASS_OBJECT:
            return 1;

        default:
            vkd3d_unreachable();
    }
}
|
|
|
|
|
2021-03-17 22:22:19 -07:00
|
|
|
/* Structural type equality: class, base type, dimensions, majority, and
 * (recursively) resource formats, array elements and struct fields must all
 * match. Struct fields must also match by name. */
bool hlsl_types_are_equal(const struct hlsl_type *t1, const struct hlsl_type *t2)
{
    if (t1 == t2)
        return true;

    if (t1->class != t2->class)
        return false;
    if (t1->base_type != t2->base_type)
        return false;
    if (t1->base_type == HLSL_TYPE_SAMPLER || t1->base_type == HLSL_TYPE_TEXTURE
            || t1->base_type == HLSL_TYPE_UAV)
    {
        if (t1->sampler_dim != t2->sampler_dim)
            return false;
        /* Generic textures carry no resource format to compare. */
        if (t1->base_type == HLSL_TYPE_TEXTURE && t1->sampler_dim != HLSL_SAMPLER_DIM_GENERIC
                && !hlsl_types_are_equal(t1->e.resource_format, t2->e.resource_format))
            return false;
    }
    /* Only the majority bit of the modifiers participates in equality. */
    if ((t1->modifiers & HLSL_MODIFIER_ROW_MAJOR)
            != (t2->modifiers & HLSL_MODIFIER_ROW_MAJOR))
        return false;
    if (t1->dimx != t2->dimx)
        return false;
    if (t1->dimy != t2->dimy)
        return false;
    if (t1->class == HLSL_CLASS_STRUCT)
    {
        size_t i;

        if (t1->e.record.field_count != t2->e.record.field_count)
            return false;

        for (i = 0; i < t1->e.record.field_count; ++i)
        {
            const struct hlsl_struct_field *field1 = &t1->e.record.fields[i];
            const struct hlsl_struct_field *field2 = &t2->e.record.fields[i];

            if (!hlsl_types_are_equal(field1->type, field2->type))
                return false;

            if (strcmp(field1->name, field2->name))
                return false;
        }
    }
    if (t1->class == HLSL_CLASS_ARRAY)
        return t1->e.array.elements_count == t2->e.array.elements_count
                && hlsl_types_are_equal(t1->e.array.type, t2->e.array.type);

    return true;
}
|
|
|
|
|
2021-06-23 21:57:32 -07:00
|
|
|
/* Deep-copy "old", OR-ing "modifiers" into the copy's modifiers and applying
 * "default_majority" when no matrix majority is already set. Array element
 * types and struct field types are cloned recursively with the same
 * parameters. Returns NULL on allocation failure (partially-built state is
 * freed). The clone is registered in ctx->types like any other type. */
struct hlsl_type *hlsl_type_clone(struct hlsl_ctx *ctx, struct hlsl_type *old,
        unsigned int default_majority, unsigned int modifiers)
{
    struct hlsl_type *type;

    if (!(type = hlsl_alloc(ctx, sizeof(*type))))
        return NULL;

    if (old->name)
    {
        type->name = hlsl_strdup(ctx, old->name);
        if (!type->name)
        {
            vkd3d_free(type);
            return NULL;
        }
    }
    type->class = old->class;
    type->base_type = old->base_type;
    type->dimx = old->dimx;
    type->dimy = old->dimy;
    type->modifiers = old->modifiers | modifiers;
    /* Fall back to the default majority only if neither the original type
     * nor the extra modifiers specify one. */
    if (!(type->modifiers & HLSL_MODIFIERS_MAJORITY_MASK))
        type->modifiers |= default_majority;
    type->sampler_dim = old->sampler_dim;
    type->is_minimum_precision = old->is_minimum_precision;
    switch (old->class)
    {
        case HLSL_CLASS_ARRAY:
            if (!(type->e.array.type = hlsl_type_clone(ctx, old->e.array.type, default_majority, modifiers)))
            {
                vkd3d_free((void *)type->name);
                vkd3d_free(type);
                return NULL;
            }
            type->e.array.elements_count = old->e.array.elements_count;
            break;

        case HLSL_CLASS_STRUCT:
        {
            size_t field_count = old->e.record.field_count, i;

            type->e.record.field_count = field_count;

            if (!(type->e.record.fields = hlsl_calloc(ctx, field_count, sizeof(*type->e.record.fields))))
            {
                vkd3d_free((void *)type->name);
                vkd3d_free(type);
                return NULL;
            }

            for (i = 0; i < field_count; ++i)
            {
                const struct hlsl_struct_field *src_field = &old->e.record.fields[i];
                struct hlsl_struct_field *dst_field = &type->e.record.fields[i];

                dst_field->loc = src_field->loc;
                if (!(dst_field->type = hlsl_type_clone(ctx, src_field->type, default_majority, modifiers)))
                {
                    /* Fields cloned in earlier iterations are not freed here;
                     * NOTE(review): presumably reclaimed via ctx->types at
                     * context teardown — confirm. */
                    vkd3d_free(type->e.record.fields);
                    vkd3d_free((void *)type->name);
                    vkd3d_free(type);
                    return NULL;
                }
                dst_field->name = hlsl_strdup(ctx, src_field->name);
                if (src_field->semantic.name)
                {
                    dst_field->semantic.name = hlsl_strdup(ctx, src_field->semantic.name);
                    dst_field->semantic.index = src_field->semantic.index;
                }
            }
            break;
        }

        default:
            break;
    }

    hlsl_type_calculate_reg_size(ctx, type);

    list_add_tail(&ctx->types, &type->entry);
    return type;
}
|
|
|
|
|
2021-02-04 14:33:50 -08:00
|
|
|
bool hlsl_scope_add_type(struct hlsl_scope *scope, struct hlsl_type *type)
|
|
|
|
{
|
2023-02-18 16:44:20 -08:00
|
|
|
if (hlsl_get_type(scope, type->name, false, false))
|
2021-02-04 14:33:50 -08:00
|
|
|
return false;
|
|
|
|
|
|
|
|
rb_put(&scope->types, type->name, &type->scope_entry);
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2022-11-10 17:39:42 -08:00
|
|
|
struct hlsl_ir_node *hlsl_new_cast(struct hlsl_ctx *ctx, struct hlsl_ir_node *node, struct hlsl_type *type,
|
2021-10-07 19:58:57 -07:00
|
|
|
const struct vkd3d_shader_location *loc)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
struct hlsl_ir_node *cast;
|
|
|
|
|
2023-04-14 00:02:14 -07:00
|
|
|
cast = hlsl_new_unary_expr(ctx, HLSL_OP1_CAST, node, loc);
|
2021-01-27 08:29:44 -08:00
|
|
|
if (cast)
|
|
|
|
cast->data_type = type;
|
2022-11-10 17:39:42 -08:00
|
|
|
return cast;
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
|
2022-11-10 17:39:42 -08:00
|
|
|
/* Create a node that evaluates to the same value as "node". */
struct hlsl_ir_node *hlsl_new_copy(struct hlsl_ctx *ctx, struct hlsl_ir_node *node)
{
    /* Use a cast to the same type as a makeshift identity expression. */
    return hlsl_new_cast(ctx, node, node->data_type, &node->loc);
}
|
|
|
|
|
2021-05-20 22:32:20 -07:00
|
|
|
/* Create a variable. "name" is stored directly (not duplicated); "semantic"
 * and "reg_reservation" are optional and copied by value when given. A
 * per-register-set objects_usage array is allocated for each object register
 * set the type occupies. Returns NULL on allocation failure, with any
 * partially-allocated usage arrays freed. */
struct hlsl_ir_var *hlsl_new_var(struct hlsl_ctx *ctx, const char *name, struct hlsl_type *type,
        const struct vkd3d_shader_location *loc, const struct hlsl_semantic *semantic, unsigned int modifiers,
        const struct hlsl_reg_reservation *reg_reservation)
{
    struct hlsl_ir_var *var;
    unsigned int k;

    if (!(var = hlsl_alloc(ctx, sizeof(*var))))
        return NULL;

    var->name = name;
    var->data_type = type;
    var->loc = *loc;
    if (semantic)
        var->semantic = *semantic;
    var->storage_modifiers = modifiers;
    if (reg_reservation)
        var->reg_reservation = *reg_reservation;

    for (k = 0; k <= HLSL_REGSET_LAST_OBJECT; ++k)
    {
        unsigned int i, obj_count = type->reg_size[k];

        if (obj_count == 0)
            continue;

        if (!(var->objects_usage[k] = hlsl_calloc(ctx, obj_count, sizeof(*var->objects_usage[0]))))
        {
            /* Unwind the usage arrays allocated so far. */
            for (i = 0; i < k; ++i)
                vkd3d_free(var->objects_usage[i]);
            vkd3d_free(var);
            return NULL;
        }
    }

    return var;
}
|
|
|
|
|
2022-09-26 15:51:36 -07:00
|
|
|
struct hlsl_ir_var *hlsl_new_synthetic_var(struct hlsl_ctx *ctx, const char *template,
|
|
|
|
struct hlsl_type *type, const struct vkd3d_shader_location *loc)
|
2021-01-30 11:51:32 -08:00
|
|
|
{
|
2022-09-26 15:51:36 -07:00
|
|
|
struct vkd3d_string_buffer *string;
|
|
|
|
struct hlsl_ir_var *var;
|
2021-01-30 11:51:32 -08:00
|
|
|
|
2022-09-26 15:51:36 -07:00
|
|
|
if (!(string = hlsl_get_string_buffer(ctx)))
|
|
|
|
return NULL;
|
2023-08-29 14:50:13 -07:00
|
|
|
vkd3d_string_buffer_printf(string, "<%s-%u>", template, ctx->internal_name_counter++);
|
2023-05-16 08:35:14 -07:00
|
|
|
var = hlsl_new_synthetic_var_named(ctx, string->buffer, type, loc, true);
|
2022-09-26 15:51:36 -07:00
|
|
|
hlsl_release_string_buffer(ctx, string);
|
2023-05-16 08:35:14 -07:00
|
|
|
return var;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Create a compiler-internal variable with the exact name "name" (which is
 * duplicated), registered either in the dummy scope or the global scope.
 * Returns NULL on allocation failure. */
struct hlsl_ir_var *hlsl_new_synthetic_var_named(struct hlsl_ctx *ctx, const char *name,
        struct hlsl_type *type, const struct vkd3d_shader_location *loc, bool dummy_scope)
{
    struct hlsl_ir_var *var;
    const char *name_copy;

    if (!(name_copy = hlsl_strdup(ctx, name)))
        return NULL;
    var = hlsl_new_var(ctx, name_copy, type, loc, NULL, 0, NULL);
    if (var)
    {
        if (dummy_scope)
            list_add_tail(&ctx->dummy_scope->vars, &var->scope_entry);
        else
            list_add_tail(&ctx->globals->vars, &var->scope_entry);
    }
    else
    {
        /* hlsl_new_var() does not take ownership of the name on failure;
         * free the duplicate to avoid leaking it. */
        vkd3d_free((void *)name_copy);
    }
    return var;
}
|
|
|
|
|
2021-02-02 14:11:17 -08:00
|
|
|
static bool type_is_single_reg(const struct hlsl_type *type)
|
2021-01-30 11:51:32 -08:00
|
|
|
{
|
2022-11-11 17:31:55 -08:00
|
|
|
return type->class == HLSL_CLASS_SCALAR || type->class == HLSL_CLASS_VECTOR;
|
2021-01-30 11:51:32 -08:00
|
|
|
}
|
|
|
|
|
2022-09-26 16:19:54 -07:00
|
|
|
/* Initialize "deref" as a copy of "other", taking new source references on
 * every node in the path. A NULL "other" yields an empty deref and succeeds.
 * Returns false on allocation failure. */
bool hlsl_copy_deref(struct hlsl_ctx *ctx, struct hlsl_deref *deref, const struct hlsl_deref *other)
{
    unsigned int i;

    memset(deref, 0, sizeof(*deref));

    if (!other)
        return true;

    /* Only path-form (non-lowered) derefs can be copied this way. */
    assert(!hlsl_deref_is_lowered(other));

    if (!init_deref(ctx, deref, other->var, other->path_len))
        return false;

    for (i = 0; i < deref->path_len; ++i)
        hlsl_src_from_node(&deref->path[i], other->path[i].node);

    return true;
}
|
|
|
|
|
2022-06-30 15:20:20 -07:00
|
|
|
/* Release all source references held by "deref" and reset it to an empty
 * state, so it can be safely cleaned up again or reused. */
void hlsl_cleanup_deref(struct hlsl_deref *deref)
{
    unsigned int i;

    for (i = 0; i < deref->path_len; ++i)
        hlsl_src_remove(&deref->path[i]);
    vkd3d_free(deref->path);

    deref->path = NULL;
    deref->path_len = 0;

    /* Lowered derefs also hold a register-offset reference and constant. */
    hlsl_src_remove(&deref->rel_offset);
    deref->const_offset = 0;
}
|
|
|
|
|
2023-03-07 17:04:37 -08:00
|
|
|
/* Initializes a simple variable dereference, so that it can be passed to load/store functions. */
void hlsl_init_simple_deref_from_var(struct hlsl_deref *deref, struct hlsl_ir_var *var)
{
    /* No path: the deref designates the whole variable. */
    memset(deref, 0, sizeof(*deref));
    deref->var = var;
}
|
|
|
|
|
2022-09-26 17:00:09 -07:00
|
|
|
static void init_node(struct hlsl_ir_node *node, enum hlsl_ir_node_type type,
|
2022-09-26 17:21:34 -07:00
|
|
|
struct hlsl_type *data_type, const struct vkd3d_shader_location *loc)
|
2022-09-26 17:00:09 -07:00
|
|
|
{
|
|
|
|
memset(node, 0, sizeof(*node));
|
|
|
|
node->type = type;
|
|
|
|
node->data_type = data_type;
|
2022-09-26 17:21:34 -07:00
|
|
|
node->loc = *loc;
|
2022-09-26 17:00:09 -07:00
|
|
|
list_init(&node->uses);
|
|
|
|
}
|
|
|
|
|
2022-11-10 18:55:03 -08:00
|
|
|
/* Create a store of "rhs" into the whole of variable "lhs" (no index path,
 * default writemask), located at the rhs's source location. */
struct hlsl_ir_node *hlsl_new_simple_store(struct hlsl_ctx *ctx, struct hlsl_ir_var *lhs, struct hlsl_ir_node *rhs)
{
    struct hlsl_deref lhs_deref;

    hlsl_init_simple_deref_from_var(&lhs_deref, lhs);
    return hlsl_new_store_index(ctx, &lhs_deref, NULL, rhs, 0, &rhs->loc);
}
|
|
|
|
|
2022-11-10 18:57:00 -08:00
|
|
|
/* Create a store of "rhs" into the deref "lhs", optionally extended by one
 * extra path index "idx". A zero "writemask" defaults to all components of
 * the rhs when the rhs fits in a single register. Returns NULL on allocation
 * failure. */
struct hlsl_ir_node *hlsl_new_store_index(struct hlsl_ctx *ctx, const struct hlsl_deref *lhs,
        struct hlsl_ir_node *idx, struct hlsl_ir_node *rhs, unsigned int writemask, const struct vkd3d_shader_location *loc)
{
    struct hlsl_ir_store *store;
    unsigned int i;

    assert(lhs);
    assert(!hlsl_deref_is_lowered(lhs));

    if (!(store = hlsl_alloc(ctx, sizeof(*store))))
        return NULL;
    init_node(&store->node, HLSL_IR_STORE, NULL, loc);

    /* Reserve one extra path slot when an additional index was supplied. */
    if (!init_deref(ctx, &store->lhs, lhs->var, lhs->path_len + !!idx))
    {
        vkd3d_free(store);
        return NULL;
    }
    for (i = 0; i < lhs->path_len; ++i)
        hlsl_src_from_node(&store->lhs.path[i], lhs->path[i].node);
    if (idx)
        hlsl_src_from_node(&store->lhs.path[lhs->path_len], idx);

    hlsl_src_from_node(&store->rhs, rhs);

    if (!writemask && type_is_single_reg(rhs->data_type))
        writemask = (1 << rhs->data_type->dimx) - 1;
    store->writemask = writemask;

    return &store->node;
}
|
|
|
|
|
2022-11-10 18:42:25 -08:00
|
|
|
/* Create a store of "rhs" into a single component "comp" of the deref "lhs".
 * Any instructions needed to compute the component's path (plus the store
 * itself) are appended to "block", which is initialized here. Returns false
 * on allocation failure. */
bool hlsl_new_store_component(struct hlsl_ctx *ctx, struct hlsl_block *block,
        const struct hlsl_deref *lhs, unsigned int comp, struct hlsl_ir_node *rhs)
{
    struct hlsl_block comp_path_block;
    struct hlsl_ir_store *store;

    hlsl_block_init(block);

    if (!(store = hlsl_alloc(ctx, sizeof(*store))))
        return false;
    init_node(&store->node, HLSL_IR_STORE, NULL, &rhs->loc);

    if (!init_deref_from_component_index(ctx, &comp_path_block, &store->lhs, lhs, comp, &rhs->loc))
    {
        vkd3d_free(store);
        return false;
    }
    hlsl_block_add_block(block, &comp_path_block);
    hlsl_src_from_node(&store->rhs, rhs);

    if (type_is_single_reg(rhs->data_type))
        store->writemask = (1 << rhs->data_type->dimx) - 1;

    hlsl_block_add_instr(block, &store->node);

    return true;
}
|
|
|
|
|
2023-01-16 11:14:09 -08:00
|
|
|
struct hlsl_ir_node *hlsl_new_call(struct hlsl_ctx *ctx, struct hlsl_ir_function_decl *decl,
|
2021-09-09 19:06:38 -07:00
|
|
|
const struct vkd3d_shader_location *loc)
|
|
|
|
{
|
|
|
|
struct hlsl_ir_call *call;
|
|
|
|
|
|
|
|
if (!(call = hlsl_alloc(ctx, sizeof(*call))))
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
init_node(&call->node, HLSL_IR_CALL, NULL, loc);
|
|
|
|
call->decl = decl;
|
|
|
|
return &call->node;
|
|
|
|
}
|
|
|
|
|
2022-11-11 17:13:26 -08:00
|
|
|
/* Create a constant node of the given numeric "type" with "value" copied by
 * value. Only scalar and vector constants are supported. */
struct hlsl_ir_node *hlsl_new_constant(struct hlsl_ctx *ctx, struct hlsl_type *type,
        const struct hlsl_constant_value *value, const struct vkd3d_shader_location *loc)
{
    struct hlsl_ir_constant *c;

    assert(type->class <= HLSL_CLASS_VECTOR);

    if (!(c = hlsl_alloc(ctx, sizeof(*c))))
        return NULL;

    init_node(&c->node, HLSL_IR_CONSTANT, type, loc);
    c->value = *value;

    return &c->node;
}
|
|
|
|
|
2022-11-10 17:37:41 -08:00
|
|
|
/* Create a scalar bool constant; true is canonically ~0u, false is 0. */
struct hlsl_ir_node *hlsl_new_bool_constant(struct hlsl_ctx *ctx, bool b, const struct vkd3d_shader_location *loc)
{
    /* Zero-initialize: hlsl_new_constant() copies the whole value, so
     * leaving the unused components indeterminate would copy uninitialized
     * stack memory into the IR node. */
    struct hlsl_constant_value value = {0};

    value.u[0].u = b ? ~0u : 0;
    return hlsl_new_constant(ctx, hlsl_get_scalar_type(ctx, HLSL_TYPE_BOOL), &value, loc);
}
|
|
|
|
|
2022-11-10 17:45:51 -08:00
|
|
|
struct hlsl_ir_node *hlsl_new_float_constant(struct hlsl_ctx *ctx, float f,
|
2022-09-26 16:50:04 -07:00
|
|
|
const struct vkd3d_shader_location *loc)
|
|
|
|
{
|
2022-11-11 17:10:14 -08:00
|
|
|
struct hlsl_constant_value value;
|
2022-09-26 16:50:04 -07:00
|
|
|
|
2022-11-11 17:10:14 -08:00
|
|
|
value.u[0].f = f;
|
2022-11-11 17:13:26 -08:00
|
|
|
return hlsl_new_constant(ctx, hlsl_get_scalar_type(ctx, HLSL_TYPE_FLOAT), &value, loc);
|
2022-09-26 16:50:04 -07:00
|
|
|
}
|
|
|
|
|
2022-11-10 18:06:59 -08:00
|
|
|
/* Create a scalar int constant with value "n". */
struct hlsl_ir_node *hlsl_new_int_constant(struct hlsl_ctx *ctx, int32_t n, const struct vkd3d_shader_location *loc)
{
    /* Zero-initialize so the unused components copied by hlsl_new_constant()
     * are deterministic rather than uninitialized stack memory. */
    struct hlsl_constant_value value = {0};

    value.u[0].i = n;
    return hlsl_new_constant(ctx, hlsl_get_scalar_type(ctx, HLSL_TYPE_INT), &value, loc);
}
|
|
|
|
|
2022-11-10 19:06:04 -08:00
|
|
|
struct hlsl_ir_node *hlsl_new_uint_constant(struct hlsl_ctx *ctx, unsigned int n,
|
2022-04-05 03:33:10 -07:00
|
|
|
const struct vkd3d_shader_location *loc)
|
2021-01-30 11:51:32 -08:00
|
|
|
{
|
2022-11-11 17:10:14 -08:00
|
|
|
struct hlsl_constant_value value;
|
2021-01-30 11:51:32 -08:00
|
|
|
|
2022-11-11 17:10:14 -08:00
|
|
|
value.u[0].u = n;
|
2022-11-11 17:13:26 -08:00
|
|
|
return hlsl_new_constant(ctx, hlsl_get_scalar_type(ctx, HLSL_TYPE_UINT), &value, loc);
|
2021-01-30 11:51:32 -08:00
|
|
|
}
|
|
|
|
|
2022-09-26 17:00:09 -07:00
|
|
|
/* Create an expression node with operation "op" and result type "data_type".
 * All HLSL_MAX_OPERANDS slots of "operands" are referenced; unused slots are
 * expected to be NULL (hlsl_src_from_node() handles NULL). */
struct hlsl_ir_node *hlsl_new_expr(struct hlsl_ctx *ctx, enum hlsl_ir_expr_op op,
        struct hlsl_ir_node *operands[HLSL_MAX_OPERANDS],
        struct hlsl_type *data_type, const struct vkd3d_shader_location *loc)
{
    struct hlsl_ir_expr *expr;
    unsigned int i;

    if (!(expr = hlsl_alloc(ctx, sizeof(*expr))))
        return NULL;
    init_node(&expr->node, HLSL_IR_EXPR, data_type, loc);
    expr->op = op;
    for (i = 0; i < HLSL_MAX_OPERANDS; ++i)
        hlsl_src_from_node(&expr->operands[i], operands[i]);
    return &expr->node;
}
|
|
|
|
|
2022-09-26 17:00:09 -07:00
|
|
|
/* Create a unary expression; the result takes the argument's type. */
struct hlsl_ir_node *hlsl_new_unary_expr(struct hlsl_ctx *ctx, enum hlsl_ir_expr_op op,
        struct hlsl_ir_node *arg, const struct vkd3d_shader_location *loc)
{
    /* Remaining operand slots are implicitly NULL. */
    struct hlsl_ir_node *operands[HLSL_MAX_OPERANDS] = {arg};

    return hlsl_new_expr(ctx, op, operands, arg->data_type, loc);
}
|
|
|
|
|
2021-05-20 22:32:20 -07:00
|
|
|
/* Create a binary expression. Both arguments must already have the same
 * type, which becomes the result type; the location is taken from arg1. */
struct hlsl_ir_node *hlsl_new_binary_expr(struct hlsl_ctx *ctx, enum hlsl_ir_expr_op op,
        struct hlsl_ir_node *arg1, struct hlsl_ir_node *arg2)
{
    struct hlsl_ir_node *operands[HLSL_MAX_OPERANDS] = {arg1, arg2};

    assert(hlsl_types_are_equal(arg1->data_type, arg2->data_type));
    return hlsl_new_expr(ctx, op, operands, arg1->data_type, &arg1->loc);
}
|
|
|
|
|
2022-11-10 18:05:53 -08:00
|
|
|
/* Create a conditional node. The contents of "then_block" (and, when given,
 * "else_block") are moved into the new node's own blocks, leaving the source
 * blocks empty. "else_block" may be NULL. */
struct hlsl_ir_node *hlsl_new_if(struct hlsl_ctx *ctx, struct hlsl_ir_node *condition,
        struct hlsl_block *then_block, struct hlsl_block *else_block, const struct vkd3d_shader_location *loc)
{
    struct hlsl_ir_if *iff;

    if (!(iff = hlsl_alloc(ctx, sizeof(*iff))))
        return NULL;
    init_node(&iff->node, HLSL_IR_IF, NULL, loc);
    hlsl_src_from_node(&iff->condition, condition);
    hlsl_block_init(&iff->then_block);
    hlsl_block_add_block(&iff->then_block, then_block);
    hlsl_block_init(&iff->else_block);
    if (else_block)
        hlsl_block_add_block(&iff->else_block, else_block);
    return &iff->node;
}
|
|
|
|
|
2023-10-11 04:51:51 -07:00
|
|
|
/* Create a switch case. "value" is the case label (ignored by consumers when
 * "is_default" is set — TODO confirm against switch lowering). The contents of
 * "body", if given, are moved into the case. Returns NULL on allocation
 * failure. */
struct hlsl_ir_switch_case *hlsl_new_switch_case(struct hlsl_ctx *ctx, unsigned int value,
        bool is_default, struct hlsl_block *body, const struct vkd3d_shader_location *loc)
{
    struct hlsl_ir_switch_case *c;

    if (!(c = hlsl_alloc(ctx, sizeof(*c))))
        return NULL;

    c->value = value;
    c->is_default = is_default;
    hlsl_block_init(&c->body);
    if (body)
        hlsl_block_add_block(&c->body, body);
    c->loc = *loc;

    return c;
}
|
|
|
|
|
|
|
|
/* Create a SWITCH node. The hlsl_ir_switch_case entries in "cases", if given,
 * are moved into the new node. Returns NULL on allocation failure. */
struct hlsl_ir_node *hlsl_new_switch(struct hlsl_ctx *ctx, struct hlsl_ir_node *selector,
        struct list *cases, const struct vkd3d_shader_location *loc)
{
    struct hlsl_ir_switch *s;

    if (!(s = hlsl_alloc(ctx, sizeof(*s))))
        return NULL;
    init_node(&s->node, HLSL_IR_SWITCH, NULL, loc);
    /* Registers the new node as a use of "selector". */
    hlsl_src_from_node(&s->selector, selector);
    list_init(&s->cases);
    if (cases)
        list_move_head(&s->cases, cases);

    return &s->node;
}
|
|
|
|
|
2022-06-30 15:20:20 -07:00
|
|
|
/* Create a LOAD node from a (non-lowered) deref, optionally appending one
 * extra path index "idx". The loaded type is the deref's type, further
 * indexed by "idx" when given. Returns NULL on allocation failure. */
struct hlsl_ir_load *hlsl_new_load_index(struct hlsl_ctx *ctx, const struct hlsl_deref *deref,
        struct hlsl_ir_node *idx, const struct vkd3d_shader_location *loc)
{
    struct hlsl_ir_load *load;
    struct hlsl_type *type;
    unsigned int i;

    /* Lowered (register-offset) derefs cannot be extended with path indices. */
    assert(!hlsl_deref_is_lowered(deref));

    type = hlsl_deref_get_type(ctx, deref);
    if (idx)
        type = hlsl_get_element_type_from_path_index(ctx, type, idx);

    if (!(load = hlsl_alloc(ctx, sizeof(*load))))
        return NULL;
    init_node(&load->node, HLSL_IR_LOAD, type, loc);

    /* Reserve room for the copied path plus the optional extra index. */
    if (!init_deref(ctx, &load->src, deref->var, deref->path_len + !!idx))
    {
        vkd3d_free(load);
        return NULL;
    }
    for (i = 0; i < deref->path_len; ++i)
        hlsl_src_from_node(&load->src.path[i], deref->path[i].node);
    if (idx)
        hlsl_src_from_node(&load->src.path[deref->path_len], idx);

    return load;
}
|
|
|
|
|
2023-05-08 15:25:18 -07:00
|
|
|
/* Create a LOAD of the parent of the object referenced by "deref", i.e. the
 * same deref with its last path index dropped. */
struct hlsl_ir_load *hlsl_new_load_parent(struct hlsl_ctx *ctx, const struct hlsl_deref *deref,
        const struct vkd3d_shader_location *loc)
{
    /* This deref can only exist temporarily because it is not the real owner
     * of its members. */
    struct hlsl_deref tmp_deref;

    assert(deref->path_len >= 1);

    tmp_deref = *deref;
    tmp_deref.path_len = deref->path_len - 1;
    return hlsl_new_load_index(ctx, &tmp_deref, NULL, loc);
}
|
|
|
|
|
2021-05-20 22:32:20 -07:00
|
|
|
/* Create a LOAD of an entire variable, i.e. a load through a path-less deref
 * of "var". Returns NULL on allocation failure. */
struct hlsl_ir_load *hlsl_new_var_load(struct hlsl_ctx *ctx, struct hlsl_ir_var *var,
        const struct vkd3d_shader_location *loc)
{
    struct hlsl_deref var_deref;

    hlsl_init_simple_deref_from_var(&var_deref, var);
    return hlsl_new_load_index(ctx, &var_deref, NULL, loc);
}
|
|
|
|
|
2022-11-10 18:19:39 -08:00
|
|
|
/* Create a LOAD of the "comp"-th scalar component of the object referenced by
 * "deref". Any constant nodes needed to build the component path, plus the
 * load itself, are appended to "block" (which is (re)initialised here).
 * Returns the load node, or NULL on allocation failure. */
struct hlsl_ir_node *hlsl_new_load_component(struct hlsl_ctx *ctx, struct hlsl_block *block,
        const struct hlsl_deref *deref, unsigned int comp, const struct vkd3d_shader_location *loc)
{
    struct hlsl_type *type, *comp_type;
    struct hlsl_block comp_path_block;
    struct hlsl_ir_load *load;

    hlsl_block_init(block);

    if (!(load = hlsl_alloc(ctx, sizeof(*load))))
        return NULL;

    type = hlsl_deref_get_type(ctx, deref);
    comp_type = hlsl_type_get_component_type(ctx, type, comp);
    init_node(&load->node, HLSL_IR_LOAD, comp_type, loc);

    /* Builds the deref path to the component; the index nodes it creates are
     * collected in comp_path_block. */
    if (!init_deref_from_component_index(ctx, &comp_path_block, &load->src, deref, comp, loc))
    {
        vkd3d_free(load);
        return NULL;
    }
    hlsl_block_add_block(block, &comp_path_block);

    hlsl_block_add_instr(block, &load->node);

    return &load->node;
}
|
|
|
|
|
2022-11-10 18:38:31 -08:00
|
|
|
/* Create a RESOURCE_LOAD node from "params". The resource (and optional
 * sampler) derefs are built from the index chains in "params"; all other
 * operands (coords, lod, ddx/ddy, etc.) may be NULL depending on the load
 * type. Returns NULL on allocation failure. */
struct hlsl_ir_node *hlsl_new_resource_load(struct hlsl_ctx *ctx,
        const struct hlsl_resource_load_params *params, const struct vkd3d_shader_location *loc)
{
    struct hlsl_ir_resource_load *load;

    if (!(load = hlsl_alloc(ctx, sizeof(*load))))
        return NULL;
    init_node(&load->node, HLSL_IR_RESOURCE_LOAD, params->format, loc);
    load->load_type = params->type;

    if (!hlsl_init_deref_from_index_chain(ctx, &load->resource, params->resource))
    {
        vkd3d_free(load);
        return NULL;
    }

    if (params->sampler)
    {
        if (!hlsl_init_deref_from_index_chain(ctx, &load->sampler, params->sampler))
        {
            hlsl_cleanup_deref(&load->resource);
            vkd3d_free(load);
            return NULL;
        }
    }

    /* hlsl_src_from_node() tolerates NULL operands. */
    hlsl_src_from_node(&load->coords, params->coords);
    hlsl_src_from_node(&load->sample_index, params->sample_index);
    hlsl_src_from_node(&load->texel_offset, params->texel_offset);
    hlsl_src_from_node(&load->lod, params->lod);
    hlsl_src_from_node(&load->ddx, params->ddx);
    hlsl_src_from_node(&load->ddy, params->ddy);
    hlsl_src_from_node(&load->cmp, params->cmp);
    /* GENERIC means "use whatever dimension the resource itself has". */
    load->sampling_dim = params->sampling_dim;
    if (load->sampling_dim == HLSL_SAMPLER_DIM_GENERIC)
        load->sampling_dim = hlsl_deref_get_type(ctx, &load->resource)->sampler_dim;
    return &load->node;
}
|
|
|
|
|
2022-11-10 18:39:11 -08:00
|
|
|
struct hlsl_ir_node *hlsl_new_resource_store(struct hlsl_ctx *ctx, const struct hlsl_deref *resource,
|
2021-08-15 10:08:32 -07:00
|
|
|
struct hlsl_ir_node *coords, struct hlsl_ir_node *value, const struct vkd3d_shader_location *loc)
|
|
|
|
{
|
|
|
|
struct hlsl_ir_resource_store *store;
|
|
|
|
|
|
|
|
if (!(store = hlsl_alloc(ctx, sizeof(*store))))
|
|
|
|
return NULL;
|
|
|
|
init_node(&store->node, HLSL_IR_RESOURCE_STORE, NULL, loc);
|
|
|
|
hlsl_copy_deref(ctx, &store->resource, resource);
|
|
|
|
hlsl_src_from_node(&store->coords, coords);
|
|
|
|
hlsl_src_from_node(&store->value, value);
|
2022-11-10 18:39:11 -08:00
|
|
|
return &store->node;
|
2021-08-15 10:08:32 -07:00
|
|
|
}
|
|
|
|
|
2022-11-10 19:01:18 -08:00
|
|
|
/* Create a SWIZZLE node selecting "components" components of "val" according
 * to the packed swizzle "s". A single-component swizzle yields a scalar type,
 * otherwise a vector of the requested width. Returns NULL on allocation
 * failure. */
struct hlsl_ir_node *hlsl_new_swizzle(struct hlsl_ctx *ctx, DWORD s, unsigned int components,
        struct hlsl_ir_node *val, const struct vkd3d_shader_location *loc)
{
    struct hlsl_ir_swizzle *swizzle;
    struct hlsl_type *type;

    if (!(swizzle = hlsl_alloc(ctx, sizeof(*swizzle))))
        return NULL;
    if (components == 1)
        type = hlsl_get_scalar_type(ctx, val->data_type->base_type);
    else
        type = hlsl_get_vector_type(ctx, val->data_type->base_type, components);
    init_node(&swizzle->node, HLSL_IR_SWIZZLE, type, loc);
    hlsl_src_from_node(&swizzle->val, val);
    swizzle->swizzle = s;
    return &swizzle->node;
}
|
|
|
|
|
2023-03-10 17:09:58 -08:00
|
|
|
bool hlsl_index_is_noncontiguous(struct hlsl_ir_index *index)
|
|
|
|
{
|
|
|
|
struct hlsl_type *type = index->val.node->data_type;
|
|
|
|
|
|
|
|
return type->class == HLSL_CLASS_MATRIX && !hlsl_type_is_row_major(type);
|
|
|
|
}
|
|
|
|
|
2023-03-10 12:23:49 -08:00
|
|
|
bool hlsl_index_is_resource_access(struct hlsl_ir_index *index)
|
|
|
|
{
|
|
|
|
return index->val.node->data_type->class == HLSL_CLASS_OBJECT;
|
|
|
|
}
|
|
|
|
|
2023-02-24 11:39:56 -08:00
|
|
|
/* Create an INDEX node, i.e. val[idx]. The result type is the resource format
 * for object types, a row vector for matrices, and otherwise the element type
 * derived from the path index. Returns NULL on allocation failure. */
struct hlsl_ir_node *hlsl_new_index(struct hlsl_ctx *ctx, struct hlsl_ir_node *val,
        struct hlsl_ir_node *idx, const struct vkd3d_shader_location *loc)
{
    struct hlsl_type *type = val->data_type;
    struct hlsl_ir_index *index;

    if (!(index = hlsl_alloc(ctx, sizeof(*index))))
        return NULL;

    if (type->class == HLSL_CLASS_OBJECT)
        type = type->e.resource_format;
    else if (type->class == HLSL_CLASS_MATRIX)
        type = hlsl_get_vector_type(ctx, type->base_type, type->dimx);
    else
        type = hlsl_get_element_type_from_path_index(ctx, type, idx);

    init_node(&index->node, HLSL_IR_INDEX, type, loc);
    hlsl_src_from_node(&index->val, val);
    hlsl_src_from_node(&index->idx, idx);
    return &index->node;
}
|
|
|
|
|
2022-11-10 18:08:44 -08:00
|
|
|
/* Create a JUMP node (break/continue/discard/return-style control transfer).
 * "condition" may be NULL for unconditional jumps. Returns NULL on allocation
 * failure. */
struct hlsl_ir_node *hlsl_new_jump(struct hlsl_ctx *ctx, enum hlsl_ir_jump_type type,
        struct hlsl_ir_node *condition, const struct vkd3d_shader_location *loc)
{
    struct hlsl_ir_jump *jump;

    if (!(jump = hlsl_alloc(ctx, sizeof(*jump))))
        return NULL;
    init_node(&jump->node, HLSL_IR_JUMP, NULL, loc);
    jump->type = type;
    hlsl_src_from_node(&jump->condition, condition);
    return &jump->node;
}
|
|
|
|
|
2022-11-10 18:36:14 -08:00
|
|
|
/* Create a LOOP node; the contents of "block" become the loop body. Returns
 * NULL on allocation failure. */
struct hlsl_ir_node *hlsl_new_loop(struct hlsl_ctx *ctx,
        struct hlsl_block *block, const struct vkd3d_shader_location *loc)
{
    struct hlsl_ir_loop *loop;

    if (!(loop = hlsl_alloc(ctx, sizeof(*loop))))
        return NULL;
    init_node(&loop->node, HLSL_IR_LOOP, NULL, loc);
    hlsl_block_init(&loop->body);
    hlsl_block_add_block(&loop->body, block);
    return &loop->node;
}
|
|
|
|
|
2021-09-11 14:56:04 -07:00
|
|
|
/* Mapping from original instructions to their clones, used while cloning a
 * block so that cloned instructions can reference cloned (rather than
 * original) operands. */
struct clone_instr_map
{
    struct
    {
        const struct hlsl_ir_node *src;   /* original instruction */
        struct hlsl_ir_node *dst;         /* its clone */
    } *instrs;
    size_t count, capacity;
};
|
|
|
|
|
|
|
|
static struct hlsl_ir_node *clone_instr(struct hlsl_ctx *ctx,
|
|
|
|
struct clone_instr_map *map, const struct hlsl_ir_node *instr);
|
|
|
|
|
|
|
|
/* Clone every instruction of "src_block" into "dst_block" (which is
 * initialised here), recording each clone in "map" so later instructions can
 * remap their operands. On failure the partially built block is cleaned up
 * and false is returned. */
static bool clone_block(struct hlsl_ctx *ctx, struct hlsl_block *dst_block,
        const struct hlsl_block *src_block, struct clone_instr_map *map)
{
    const struct hlsl_ir_node *src;
    struct hlsl_ir_node *dst;

    hlsl_block_init(dst_block);

    LIST_FOR_EACH_ENTRY(src, &src_block->instrs, struct hlsl_ir_node, entry)
    {
        if (!(dst = clone_instr(ctx, map, src)))
        {
            hlsl_block_cleanup(dst_block);
            return false;
        }
        hlsl_block_add_instr(dst_block, dst);

        /* Only instructions that are actually used need a map entry. */
        if (!list_empty(&src->uses))
        {
            if (!hlsl_array_reserve(ctx, (void **)&map->instrs, &map->capacity, map->count + 1, sizeof(*map->instrs)))
            {
                hlsl_block_cleanup(dst_block);
                return false;
            }

            map->instrs[map->count].dst = dst;
            map->instrs[map->count].src = src;
            ++map->count;
        }
    }
    return true;
}
|
|
|
|
|
|
|
|
static struct hlsl_ir_node *map_instr(const struct clone_instr_map *map, struct hlsl_ir_node *src)
|
|
|
|
{
|
|
|
|
size_t i;
|
|
|
|
|
|
|
|
if (!src)
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
for (i = 0; i < map->count; ++i)
|
|
|
|
{
|
|
|
|
if (map->instrs[i].src == src)
|
|
|
|
return map->instrs[i].dst;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* The block passed to hlsl_clone_block() should have been free of external
|
|
|
|
* references. */
|
|
|
|
vkd3d_unreachable();
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Clone a (non-lowered) deref, remapping each path node through "map".
 * Returns false on allocation failure. */
static bool clone_deref(struct hlsl_ctx *ctx, struct clone_instr_map *map,
        struct hlsl_deref *dst, const struct hlsl_deref *src)
{
    unsigned int i;

    assert(!hlsl_deref_is_lowered(src));

    if (!init_deref(ctx, dst, src->var, src->path_len))
        return false;

    for (i = 0; i < src->path_len; ++i)
        hlsl_src_from_node(&dst->path[i], map_instr(map, src->path[i].node));

    return true;
}
|
|
|
|
|
|
|
|
/* Clone a source reference, remapping the referenced node through "map". */
static void clone_src(struct clone_instr_map *map, struct hlsl_src *dst, const struct hlsl_src *src)
{
    hlsl_src_from_node(dst, map_instr(map, src->node));
}
|
|
|
|
|
|
|
|
/* Clone a CALL node. Calls have no operands to remap; only the callee and
 * location are copied. */
static struct hlsl_ir_node *clone_call(struct hlsl_ctx *ctx, struct hlsl_ir_call *src)
{
    return hlsl_new_call(ctx, src->decl, &src->node.loc);
}
|
|
|
|
|
|
|
|
/* Clone a CONSTANT node by re-creating it with the same type and value. */
static struct hlsl_ir_node *clone_constant(struct hlsl_ctx *ctx, struct hlsl_ir_constant *src)
{
    return hlsl_new_constant(ctx, src->node.data_type, &src->value, &src->node.loc);
}
|
|
|
|
|
|
|
|
/* Clone an EXPR node, remapping all of its operands through "map". */
static struct hlsl_ir_node *clone_expr(struct hlsl_ctx *ctx, struct clone_instr_map *map, struct hlsl_ir_expr *src)
{
    struct hlsl_ir_node *operands[HLSL_MAX_OPERANDS];
    unsigned int i;

    /* Unused operand slots are NULL, which map_instr() passes through. */
    for (i = 0; i < ARRAY_SIZE(operands); ++i)
        operands[i] = map_instr(map, src->operands[i].node);

    return hlsl_new_expr(ctx, src->op, operands, src->node.data_type, &src->node.loc);
}
|
|
|
|
|
|
|
|
/* Clone an IF node, recursively cloning both branches. Returns NULL on
 * allocation failure, cleaning up any partially cloned branch. */
static struct hlsl_ir_node *clone_if(struct hlsl_ctx *ctx, struct clone_instr_map *map, struct hlsl_ir_if *src)
{
    struct hlsl_block then_block, else_block;
    struct hlsl_ir_node *dst;

    if (!clone_block(ctx, &then_block, &src->then_block, map))
        return NULL;
    if (!clone_block(ctx, &else_block, &src->else_block, map))
    {
        hlsl_block_cleanup(&then_block);
        return NULL;
    }

    if (!(dst = hlsl_new_if(ctx, map_instr(map, src->condition.node), &then_block, &else_block, &src->node.loc)))
    {
        hlsl_block_cleanup(&then_block);
        hlsl_block_cleanup(&else_block);
        return NULL;
    }

    return dst;
}
|
|
|
|
|
2023-06-08 00:42:58 -07:00
|
|
|
/* Clone a JUMP node, remapping its (possibly NULL) condition. */
static struct hlsl_ir_node *clone_jump(struct hlsl_ctx *ctx, struct clone_instr_map *map, struct hlsl_ir_jump *src)
{
    return hlsl_new_jump(ctx, src->type, map_instr(map, src->condition.node), &src->node.loc);
}
|
|
|
|
|
|
|
|
/* Clone a LOAD node, cloning its source deref with operands remapped.
 * Returns NULL on allocation failure. */
static struct hlsl_ir_node *clone_load(struct hlsl_ctx *ctx, struct clone_instr_map *map, struct hlsl_ir_load *src)
{
    struct hlsl_ir_load *dst;

    if (!(dst = hlsl_alloc(ctx, sizeof(*dst))))
        return NULL;
    init_node(&dst->node, HLSL_IR_LOAD, src->node.data_type, &src->node.loc);

    if (!clone_deref(ctx, map, &dst->src, &src->src))
    {
        vkd3d_free(dst);
        return NULL;
    }
    return &dst->node;
}
|
|
|
|
|
|
|
|
/* Clone a LOOP node, recursively cloning its body. Returns NULL on
 * allocation failure. */
static struct hlsl_ir_node *clone_loop(struct hlsl_ctx *ctx, struct clone_instr_map *map, struct hlsl_ir_loop *src)
{
    struct hlsl_ir_node *dst;
    struct hlsl_block body;

    if (!clone_block(ctx, &body, &src->body, map))
        return NULL;

    if (!(dst = hlsl_new_loop(ctx, &body, &src->node.loc)))
    {
        hlsl_block_cleanup(&body);
        return NULL;
    }
    return dst;
}
|
|
|
|
|
|
|
|
/* Clone a RESOURCE_LOAD node, cloning the resource and sampler derefs and
 * remapping every operand. Returns NULL on allocation failure. */
static struct hlsl_ir_node *clone_resource_load(struct hlsl_ctx *ctx,
        struct clone_instr_map *map, struct hlsl_ir_resource_load *src)
{
    struct hlsl_ir_resource_load *dst;

    if (!(dst = hlsl_alloc(ctx, sizeof(*dst))))
        return NULL;
    init_node(&dst->node, HLSL_IR_RESOURCE_LOAD, src->node.data_type, &src->node.loc);
    dst->load_type = src->load_type;
    if (!clone_deref(ctx, map, &dst->resource, &src->resource))
    {
        vkd3d_free(dst);
        return NULL;
    }
    if (!clone_deref(ctx, map, &dst->sampler, &src->sampler))
    {
        hlsl_cleanup_deref(&dst->resource);
        vkd3d_free(dst);
        return NULL;
    }
    clone_src(map, &dst->coords, &src->coords);
    clone_src(map, &dst->lod, &src->lod);
    clone_src(map, &dst->ddx, &src->ddx);
    clone_src(map, &dst->ddy, &src->ddy);
    clone_src(map, &dst->sample_index, &src->sample_index);
    clone_src(map, &dst->cmp, &src->cmp);
    clone_src(map, &dst->texel_offset, &src->texel_offset);
    dst->sampling_dim = src->sampling_dim;
    return &dst->node;
}
|
|
|
|
|
|
|
|
/* Clone a RESOURCE_STORE node, cloning its resource deref and remapping the
 * coords and value operands. Returns NULL on allocation failure. */
static struct hlsl_ir_node *clone_resource_store(struct hlsl_ctx *ctx,
        struct clone_instr_map *map, struct hlsl_ir_resource_store *src)
{
    struct hlsl_ir_resource_store *dst;

    if (!(dst = hlsl_alloc(ctx, sizeof(*dst))))
        return NULL;
    init_node(&dst->node, HLSL_IR_RESOURCE_STORE, NULL, &src->node.loc);
    if (!clone_deref(ctx, map, &dst->resource, &src->resource))
    {
        vkd3d_free(dst);
        return NULL;
    }
    clone_src(map, &dst->coords, &src->coords);
    clone_src(map, &dst->value, &src->value);
    return &dst->node;
}
|
|
|
|
|
|
|
|
/* Clone a STORE node, cloning its destination deref and remapping the
 * right-hand side. Returns NULL on allocation failure. */
static struct hlsl_ir_node *clone_store(struct hlsl_ctx *ctx, struct clone_instr_map *map, struct hlsl_ir_store *src)
{
    struct hlsl_ir_store *dst;

    if (!(dst = hlsl_alloc(ctx, sizeof(*dst))))
        return NULL;
    init_node(&dst->node, HLSL_IR_STORE, NULL, &src->node.loc);

    if (!clone_deref(ctx, map, &dst->lhs, &src->lhs))
    {
        vkd3d_free(dst);
        return NULL;
    }
    clone_src(map, &dst->rhs, &src->rhs);
    dst->writemask = src->writemask;
    return &dst->node;
}
|
|
|
|
|
|
|
|
/* Clone a SWIZZLE node, remapping its value operand. */
static struct hlsl_ir_node *clone_swizzle(struct hlsl_ctx *ctx,
        struct clone_instr_map *map, struct hlsl_ir_swizzle *src)
{
    return hlsl_new_swizzle(ctx, src->swizzle, src->node.data_type->dimx,
            map_instr(map, src->val.node), &src->node.loc);
}
|
|
|
|
|
2023-02-24 11:39:56 -08:00
|
|
|
static struct hlsl_ir_node *clone_index(struct hlsl_ctx *ctx, struct clone_instr_map *map,
|
|
|
|
struct hlsl_ir_index *src)
|
|
|
|
{
|
|
|
|
struct hlsl_ir_node *dst;
|
|
|
|
|
|
|
|
if (!(dst = hlsl_new_index(ctx, map_instr(map, src->val.node), map_instr(map, src->idx.node),
|
|
|
|
&src->node.loc)))
|
|
|
|
return NULL;
|
|
|
|
return dst;
|
|
|
|
}
|
|
|
|
|
2023-10-11 04:51:51 -07:00
|
|
|
/* Free a switch case, unlinking it from its containing case list. */
void hlsl_free_ir_switch_case(struct hlsl_ir_switch_case *c)
{
    hlsl_block_cleanup(&c->body);
    list_remove(&c->entry);
    vkd3d_free(c);
}
|
|
|
|
|
|
|
|
/* Free every switch case in "cases", leaving the list empty. */
void hlsl_cleanup_ir_switch_cases(struct list *cases)
{
    struct hlsl_ir_switch_case *c, *next;

    /* SAFE iteration: hlsl_free_ir_switch_case() removes entries as we go. */
    LIST_FOR_EACH_ENTRY_SAFE(c, next, cases, struct hlsl_ir_switch_case, entry)
    {
        hlsl_free_ir_switch_case(c);
    }
}
|
|
|
|
|
|
|
|
/* Clone a SWITCH node, recursively cloning every case body and remapping the
 * selector. Returns NULL on allocation failure. */
static struct hlsl_ir_node *clone_switch(struct hlsl_ctx *ctx,
        struct clone_instr_map *map, struct hlsl_ir_switch *s)
{
    struct hlsl_ir_switch_case *c, *d;
    struct hlsl_ir_node *ret;
    struct hlsl_block body;
    struct list cases;

    list_init(&cases);

    LIST_FOR_EACH_ENTRY(c, &s->cases, struct hlsl_ir_switch_case, entry)
    {
        if (!(clone_block(ctx, &body, &c->body, map)))
        {
            hlsl_cleanup_ir_switch_cases(&cases);
            return NULL;
        }

        /* hlsl_new_switch_case() copies the body contents; clean up the
         * temporary block regardless of success. */
        d = hlsl_new_switch_case(ctx, c->value, c->is_default, &body, &c->loc);
        hlsl_block_cleanup(&body);
        if (!d)
        {
            hlsl_cleanup_ir_switch_cases(&cases);
            return NULL;
        }

        list_add_tail(&cases, &d->entry);
    }

    /* hlsl_new_switch() moves the cases into the node; the local list is
     * empty (or holds leftovers to free) afterwards. */
    ret = hlsl_new_switch(ctx, map_instr(map, s->selector.node), &cases, &s->node.loc);
    hlsl_cleanup_ir_switch_cases(&cases);

    return ret;
}
|
|
|
|
|
2021-09-11 14:56:04 -07:00
|
|
|
/* Clone a single instruction, dispatching on its node type. Returns NULL on
 * allocation failure. Reaching the end of the switch means an unhandled node
 * type, which is a programming error. */
static struct hlsl_ir_node *clone_instr(struct hlsl_ctx *ctx,
        struct clone_instr_map *map, const struct hlsl_ir_node *instr)
{
    switch (instr->type)
    {
        case HLSL_IR_CALL:
            return clone_call(ctx, hlsl_ir_call(instr));

        case HLSL_IR_CONSTANT:
            return clone_constant(ctx, hlsl_ir_constant(instr));

        case HLSL_IR_EXPR:
            return clone_expr(ctx, map, hlsl_ir_expr(instr));

        case HLSL_IR_IF:
            return clone_if(ctx, map, hlsl_ir_if(instr));

        case HLSL_IR_INDEX:
            return clone_index(ctx, map, hlsl_ir_index(instr));

        case HLSL_IR_JUMP:
            return clone_jump(ctx, map, hlsl_ir_jump(instr));

        case HLSL_IR_LOAD:
            return clone_load(ctx, map, hlsl_ir_load(instr));

        case HLSL_IR_LOOP:
            return clone_loop(ctx, map, hlsl_ir_loop(instr));

        case HLSL_IR_RESOURCE_LOAD:
            return clone_resource_load(ctx, map, hlsl_ir_resource_load(instr));

        case HLSL_IR_RESOURCE_STORE:
            return clone_resource_store(ctx, map, hlsl_ir_resource_store(instr));

        case HLSL_IR_STORE:
            return clone_store(ctx, map, hlsl_ir_store(instr));

        case HLSL_IR_SWITCH:
            return clone_switch(ctx, map, hlsl_ir_switch(instr));

        case HLSL_IR_SWIZZLE:
            return clone_swizzle(ctx, map, hlsl_ir_swizzle(instr));
    }

    vkd3d_unreachable();
}
|
|
|
|
|
|
|
|
/* Deep-copy "src_block" into "dst_block". The source block must not contain
 * references to instructions outside itself (see map_instr()). Returns false
 * on allocation failure. */
bool hlsl_clone_block(struct hlsl_ctx *ctx, struct hlsl_block *dst_block, const struct hlsl_block *src_block)
{
    struct clone_instr_map map = {0};
    bool ret;

    ret = clone_block(ctx, dst_block, src_block, &map);
    vkd3d_free(map.instrs);
    return ret;
}
|
|
|
|
|
2023-01-31 17:27:01 -08:00
|
|
|
/* Create a function declaration. Non-void functions get a synthetic "retval"
 * variable carrying the return semantic; every function also gets a synthetic
 * "early_return" bool, initialised to false at the top of the body.
 *
 * Note that after the decl itself is allocated, later allocation failures
 * still return the (partially initialised) decl — hlsl_alloc() records the
 * out-of-memory condition in ctx, so the caller's compilation will fail
 * anyway. */
struct hlsl_ir_function_decl *hlsl_new_func_decl(struct hlsl_ctx *ctx,
        struct hlsl_type *return_type, const struct hlsl_func_parameters *parameters,
        const struct hlsl_semantic *semantic, const struct vkd3d_shader_location *loc)
{
    struct hlsl_ir_node *constant, *store;
    struct hlsl_ir_function_decl *decl;

    if (!(decl = hlsl_alloc(ctx, sizeof(*decl))))
        return NULL;
    hlsl_block_init(&decl->body);
    decl->return_type = return_type;
    decl->parameters = *parameters;
    decl->loc = *loc;

    if (!hlsl_types_are_equal(return_type, ctx->builtin_types.Void))
    {
        if (!(decl->return_var = hlsl_new_synthetic_var(ctx, "retval", return_type, loc)))
        {
            vkd3d_free(decl);
            return NULL;
        }
        decl->return_var->semantic = *semantic;
    }

    if (!(decl->early_return_var = hlsl_new_synthetic_var(ctx, "early_return",
            hlsl_get_scalar_type(ctx, HLSL_TYPE_BOOL), loc)))
        return decl;

    /* Prepend "early_return = false;" to the body. */
    if (!(constant = hlsl_new_bool_constant(ctx, false, loc)))
        return decl;
    hlsl_block_add_instr(&decl->body, constant);

    if (!(store = hlsl_new_simple_store(ctx, decl->early_return_var, constant)))
        return decl;
    hlsl_block_add_instr(&decl->body, store);

    return decl;
}
|
|
|
|
|
2021-06-21 21:37:09 -07:00
|
|
|
/* Create a constant/texture buffer and append it to the context's buffer
 * list. "name" is borrowed, not copied — the caller keeps it alive.
 * Returns NULL on allocation failure. */
struct hlsl_buffer *hlsl_new_buffer(struct hlsl_ctx *ctx, enum hlsl_buffer_type type, const char *name,
        const struct hlsl_reg_reservation *reservation, const struct vkd3d_shader_location *loc)
{
    struct hlsl_buffer *buffer;

    if (!(buffer = hlsl_alloc(ctx, sizeof(*buffer))))
        return NULL;
    buffer->type = type;
    buffer->name = name;
    if (reservation)
        buffer->reservation = *reservation;
    buffer->loc = *loc;
    list_add_tail(&ctx->buffers, &buffer->entry);
    return buffer;
}
|
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
/* rb-tree comparison callback for looking up a named type in a scope.
 * "key" is the type name being searched for. */
static int compare_hlsl_types_rb(const void *key, const struct rb_entry *entry)
{
    const struct hlsl_type *type = RB_ENTRY_VALUE(entry, const struct hlsl_type, scope_entry);
    const char *name = key;

    /* Covers the both-NULL case as well as identical pointers. */
    if (name == type->name)
        return 0;

    if (!name || !type->name)
    {
        ERR("hlsl_type without a name in a scope?\n");
        return -1;
    }
    return strcmp(name, type->name);
}
|
|
|
|
|
2023-01-31 20:18:35 -08:00
|
|
|
/* Create a new scope with parent "upper" and register it with the context,
 * which owns all scopes. Returns NULL on allocation failure. */
static struct hlsl_scope *hlsl_new_scope(struct hlsl_ctx *ctx, struct hlsl_scope *upper)
{
    struct hlsl_scope *scope;

    if (!(scope = hlsl_alloc(ctx, sizeof(*scope))))
        return NULL;
    list_init(&scope->vars);
    rb_init(&scope->types, compare_hlsl_types_rb);
    scope->upper = upper;
    list_add_tail(&ctx->scopes, &scope->entry);
    return scope;
}
|
|
|
|
|
2021-02-04 14:33:53 -08:00
|
|
|
/* Enter a new scope nested inside the current one. On allocation failure the
 * current scope is left unchanged (the OOM is recorded in ctx by
 * hlsl_alloc()). */
void hlsl_push_scope(struct hlsl_ctx *ctx)
{
    struct hlsl_scope *new_scope;

    if (!(new_scope = hlsl_new_scope(ctx, ctx->cur_scope)))
        return;

    TRACE("Pushing a new scope.\n");
    ctx->cur_scope = new_scope;
}
|
|
|
|
|
2021-02-04 14:33:53 -08:00
|
|
|
/* Leave the current scope, returning to its parent. Must not be called on
 * the outermost scope (asserted). */
void hlsl_pop_scope(struct hlsl_ctx *ctx)
{
    struct hlsl_scope *prev_scope = ctx->cur_scope->upper;

    assert(prev_scope);
    TRACE("Popping current scope.\n");
    ctx->cur_scope = prev_scope;
}
|
|
|
|
|
|
|
|
static int compare_param_hlsl_types(const struct hlsl_type *t1, const struct hlsl_type *t2)
|
|
|
|
{
|
2021-10-07 19:58:55 -07:00
|
|
|
int r;
|
|
|
|
|
2022-11-11 17:31:55 -08:00
|
|
|
if ((r = vkd3d_u32_compare(t1->class, t2->class)))
|
2023-09-08 14:40:22 -07:00
|
|
|
return r;
|
2022-02-28 03:23:48 -08:00
|
|
|
if ((r = vkd3d_u32_compare(t1->base_type, t2->base_type)))
|
|
|
|
return r;
|
2022-01-27 10:31:21 -08:00
|
|
|
if (t1->base_type == HLSL_TYPE_SAMPLER || t1->base_type == HLSL_TYPE_TEXTURE)
|
2021-10-07 19:58:55 -07:00
|
|
|
{
|
2022-02-28 03:23:48 -08:00
|
|
|
if ((r = vkd3d_u32_compare(t1->sampler_dim, t2->sampler_dim)))
|
|
|
|
return r;
|
2021-10-07 19:58:55 -07:00
|
|
|
if (t1->base_type == HLSL_TYPE_TEXTURE && t1->sampler_dim != HLSL_SAMPLER_DIM_GENERIC
|
|
|
|
&& (r = compare_param_hlsl_types(t1->e.resource_format, t2->e.resource_format)))
|
|
|
|
return r;
|
|
|
|
}
|
2022-02-28 03:23:48 -08:00
|
|
|
if ((r = vkd3d_u32_compare(t1->dimx, t2->dimx)))
|
|
|
|
return r;
|
|
|
|
if ((r = vkd3d_u32_compare(t1->dimy, t2->dimy)))
|
|
|
|
return r;
|
2022-11-11 17:31:55 -08:00
|
|
|
if (t1->class == HLSL_CLASS_STRUCT)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2022-07-14 18:23:43 -07:00
|
|
|
size_t i;
|
2021-01-27 08:29:44 -08:00
|
|
|
|
2022-07-14 18:23:43 -07:00
|
|
|
if (t1->e.record.field_count != t2->e.record.field_count)
|
|
|
|
return t1->e.record.field_count - t2->e.record.field_count;
|
|
|
|
|
|
|
|
for (i = 0; i < t1->e.record.field_count; ++i)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2022-07-14 18:23:43 -07:00
|
|
|
const struct hlsl_struct_field *field1 = &t1->e.record.fields[i];
|
|
|
|
const struct hlsl_struct_field *field2 = &t2->e.record.fields[i];
|
|
|
|
|
|
|
|
if ((r = compare_param_hlsl_types(field1->type, field2->type)))
|
2021-01-27 08:29:44 -08:00
|
|
|
return r;
|
2022-07-14 18:23:43 -07:00
|
|
|
|
|
|
|
if ((r = strcmp(field1->name, field2->name)))
|
2021-01-27 08:29:44 -08:00
|
|
|
return r;
|
|
|
|
}
|
|
|
|
return 0;
|
|
|
|
}
|
2022-11-11 17:31:55 -08:00
|
|
|
if (t1->class == HLSL_CLASS_ARRAY)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2022-02-28 03:23:48 -08:00
|
|
|
if ((r = vkd3d_u32_compare(t1->e.array.elements_count, t2->e.array.elements_count)))
|
|
|
|
return r;
|
2021-01-27 08:29:44 -08:00
|
|
|
return compare_param_hlsl_types(t1->e.array.type, t2->e.array.type);
|
|
|
|
}
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
2023-09-08 14:47:19 -07:00
|
|
|
static bool func_decl_matches(const struct hlsl_ir_function_decl *decl,
|
vkd3d-shader/hlsl: Store function overloads in a list.
The choice to store them in an rbtree was made early on. It does not seem likely
that HLSL programs would define many overloads for any of their functions, but I
suspect the idea was rather that intrinsics would be defined as plain
hlsl_ir_function_decl structures [cf. 447463e5900ca6a636998a65429b8a08a5441657]
and that some intrinsics that could operate on any type would therefore need
many overrides.
This is not how we deal with intrinsics, however. When the first intrinsics were
implemented I made the choice disregard this intended design, and instead match
and convert their types manually, in C. Nothing that has happened in the time
since has led me to question that choice, and in fact, the flexibility with
which we must accommodate functions has led me to believe that matching in this
way was definitely the right choice. The main other designs I see would have
been:
* define each intrinsic variant separately using existing HLSL types. Besides
efficiency concerns (i.e. this would take more space in memory, and would take
longer to generate each variant), the normal type-matching rules don't really
apply to intrinsics.
[For example: elementwise intrinsics like abs() return the same type as the
input, including preserving the distinction between float and float1. It is
legal to define separate HLSL overloads taking float and float1, but trying to
invoke these functions yields an "ambiguous function call" error.]
* introduce new (semi-)generic types. This is far more code and ends up acting
like our current scheme (with helpers) in a slightly more complex form.
So I think we can go ahead and rip out this vestige of the original design for
intrinsics.
As for why to change it: rbtrees are simply more complex to deal with, and it
seems unlikely to me that the difference is going to matter. I do not expect any
program to define large quantities of intrinsics; linked list search should be
good enough.
2023-09-08 14:27:10 -07:00
|
|
|
const struct hlsl_func_parameters *parameters)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2023-01-31 17:27:01 -08:00
|
|
|
size_t i;
|
2021-01-27 08:29:44 -08:00
|
|
|
|
2023-09-08 14:47:19 -07:00
|
|
|
if (parameters->count != decl->parameters.count)
|
|
|
|
return false;
|
2021-01-27 08:29:44 -08:00
|
|
|
|
2023-01-31 17:27:01 -08:00
|
|
|
for (i = 0; i < parameters->count; ++i)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2023-09-08 14:47:19 -07:00
|
|
|
if (compare_param_hlsl_types(parameters->vars[i]->data_type, decl->parameters.vars[i]->data_type))
|
|
|
|
return false;
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
2023-09-08 14:47:19 -07:00
|
|
|
return true;
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
|
vkd3d-shader/hlsl: Store function overloads in a list.
The choice to store them in an rbtree was made early on. It does not seem likely
that HLSL programs would define many overloads for any of their functions, but I
suspect the idea was rather that intrinsics would be defined as plain
hlsl_ir_function_decl structures [cf. 447463e5900ca6a636998a65429b8a08a5441657]
and that some intrinsics that could operate on any type would therefore need
many overrides.
This is not how we deal with intrinsics, however. When the first intrinsics were
implemented I made the choice disregard this intended design, and instead match
and convert their types manually, in C. Nothing that has happened in the time
since has led me to question that choice, and in fact, the flexibility with
which we must accommodate functions has led me to believe that matching in this
way was definitely the right choice. The main other designs I see would have
been:
* define each intrinsic variant separately using existing HLSL types. Besides
efficiency concerns (i.e. this would take more space in memory, and would take
longer to generate each variant), the normal type-matching rules don't really
apply to intrinsics.
[For example: elementwise intrinsics like abs() return the same type as the
input, including preserving the distinction between float and float1. It is
legal to define separate HLSL overloads taking float and float1, but trying to
invoke these functions yields an "ambiguous function call" error.]
* introduce new (semi-)generic types. This is far more code and ends up acting
like our current scheme (with helpers) in a slightly more complex form.
So I think we can go ahead and rip out this vestige of the original design for
intrinsics.
As for why to change it: rbtrees are simply more complex to deal with, and it
seems unlikely to me that the difference is going to matter. I do not expect any
program to define large quantities of intrinsics; linked list search should be
good enough.
2023-09-08 14:27:10 -07:00
|
|
|
struct hlsl_ir_function_decl *hlsl_get_func_decl(struct hlsl_ctx *ctx, const char *name,
|
|
|
|
const struct hlsl_func_parameters *parameters)
|
|
|
|
{
|
|
|
|
struct hlsl_ir_function_decl *decl;
|
|
|
|
struct hlsl_ir_function *func;
|
|
|
|
|
|
|
|
if (!(func = hlsl_get_function(ctx, name)))
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
LIST_FOR_EACH_ENTRY(decl, &func->overloads, struct hlsl_ir_function_decl, entry)
|
|
|
|
{
|
2023-09-08 14:47:19 -07:00
|
|
|
if (func_decl_matches(decl, parameters))
|
vkd3d-shader/hlsl: Store function overloads in a list.
The choice to store them in an rbtree was made early on. It does not seem likely
that HLSL programs would define many overloads for any of their functions, but I
suspect the idea was rather that intrinsics would be defined as plain
hlsl_ir_function_decl structures [cf. 447463e5900ca6a636998a65429b8a08a5441657]
and that some intrinsics that could operate on any type would therefore need
many overrides.
This is not how we deal with intrinsics, however. When the first intrinsics were
implemented I made the choice disregard this intended design, and instead match
and convert their types manually, in C. Nothing that has happened in the time
since has led me to question that choice, and in fact, the flexibility with
which we must accommodate functions has led me to believe that matching in this
way was definitely the right choice. The main other designs I see would have
been:
* define each intrinsic variant separately using existing HLSL types. Besides
efficiency concerns (i.e. this would take more space in memory, and would take
longer to generate each variant), the normal type-matching rules don't really
apply to intrinsics.
[For example: elementwise intrinsics like abs() return the same type as the
input, including preserving the distinction between float and float1. It is
legal to define separate HLSL overloads taking float and float1, but trying to
invoke these functions yields an "ambiguous function call" error.]
* introduce new (semi-)generic types. This is far more code and ends up acting
like our current scheme (with helpers) in a slightly more complex form.
So I think we can go ahead and rip out this vestige of the original design for
intrinsics.
As for why to change it: rbtrees are simply more complex to deal with, and it
seems unlikely to me that the difference is going to matter. I do not expect any
program to define large quantities of intrinsics; linked list search should be
good enough.
2023-09-08 14:27:10 -07:00
|
|
|
return decl;
|
|
|
|
}
|
|
|
|
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
struct vkd3d_string_buffer *hlsl_type_to_string(struct hlsl_ctx *ctx, const struct hlsl_type *type)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2023-05-02 18:47:05 -07:00
|
|
|
struct vkd3d_string_buffer *string, *inner_string;
|
2021-01-27 08:29:44 -08:00
|
|
|
|
2021-11-08 19:39:27 -08:00
|
|
|
static const char *const base_types[] =
|
2021-02-16 21:52:05 -08:00
|
|
|
{
|
2021-11-08 19:39:27 -08:00
|
|
|
[HLSL_TYPE_FLOAT] = "float",
|
|
|
|
[HLSL_TYPE_HALF] = "half",
|
|
|
|
[HLSL_TYPE_DOUBLE] = "double",
|
|
|
|
[HLSL_TYPE_INT] = "int",
|
|
|
|
[HLSL_TYPE_UINT] = "uint",
|
|
|
|
[HLSL_TYPE_BOOL] = "bool",
|
2021-02-16 21:52:05 -08:00
|
|
|
};
|
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
if (!(string = hlsl_get_string_buffer(ctx)))
|
2021-02-27 16:03:09 -08:00
|
|
|
return NULL;
|
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
if (type->name)
|
2021-02-27 16:03:09 -08:00
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(string, "%s", type->name);
|
|
|
|
return string;
|
|
|
|
}
|
2021-01-27 08:29:44 -08:00
|
|
|
|
2022-11-11 17:31:55 -08:00
|
|
|
switch (type->class)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2021-02-12 12:38:50 -08:00
|
|
|
case HLSL_CLASS_SCALAR:
|
2021-02-27 16:03:11 -08:00
|
|
|
assert(type->base_type < ARRAY_SIZE(base_types));
|
2021-02-27 16:03:09 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "%s", base_types[type->base_type]);
|
|
|
|
return string;
|
2021-02-12 12:38:50 -08:00
|
|
|
|
|
|
|
case HLSL_CLASS_VECTOR:
|
2021-02-27 16:03:11 -08:00
|
|
|
assert(type->base_type < ARRAY_SIZE(base_types));
|
2021-02-27 16:03:09 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "%s%u", base_types[type->base_type], type->dimx);
|
2021-02-12 12:38:50 -08:00
|
|
|
return string;
|
|
|
|
|
|
|
|
case HLSL_CLASS_MATRIX:
|
2021-02-27 16:03:11 -08:00
|
|
|
assert(type->base_type < ARRAY_SIZE(base_types));
|
2022-02-25 07:25:31 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "%s%ux%u", base_types[type->base_type], type->dimy, type->dimx);
|
2021-02-12 12:38:50 -08:00
|
|
|
return string;
|
|
|
|
|
|
|
|
case HLSL_CLASS_ARRAY:
|
2021-02-16 21:52:04 -08:00
|
|
|
{
|
|
|
|
const struct hlsl_type *t;
|
|
|
|
|
2022-11-11 17:31:55 -08:00
|
|
|
for (t = type; t->class == HLSL_CLASS_ARRAY; t = t->e.array.type)
|
2021-02-27 16:03:09 -08:00
|
|
|
;
|
2021-02-16 21:52:04 -08:00
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
if ((inner_string = hlsl_type_to_string(ctx, t)))
|
2021-02-16 21:52:04 -08:00
|
|
|
{
|
2021-02-27 16:03:09 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "%s", inner_string->buffer);
|
2021-05-20 22:32:22 -07:00
|
|
|
hlsl_release_string_buffer(ctx, inner_string);
|
2021-02-16 21:52:04 -08:00
|
|
|
}
|
|
|
|
|
2022-11-11 17:31:55 -08:00
|
|
|
for (t = type; t->class == HLSL_CLASS_ARRAY; t = t->e.array.type)
|
2022-08-08 15:11:03 -07:00
|
|
|
{
|
|
|
|
if (t->e.array.elements_count == HLSL_ARRAY_ELEMENTS_COUNT_IMPLICIT)
|
|
|
|
vkd3d_string_buffer_printf(string, "[]");
|
|
|
|
else
|
|
|
|
vkd3d_string_buffer_printf(string, "[%u]", t->e.array.elements_count);
|
|
|
|
}
|
2021-02-12 12:38:50 -08:00
|
|
|
return string;
|
2021-02-16 21:52:04 -08:00
|
|
|
}
|
2021-02-12 12:38:50 -08:00
|
|
|
|
|
|
|
case HLSL_CLASS_STRUCT:
|
2021-02-27 16:03:09 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "<anonymous struct>");
|
|
|
|
return string;
|
2021-02-12 12:38:50 -08:00
|
|
|
|
2021-10-07 19:58:56 -07:00
|
|
|
case HLSL_CLASS_OBJECT:
|
|
|
|
{
|
2021-11-08 19:39:27 -08:00
|
|
|
static const char *const dimensions[] =
|
2021-10-07 19:58:56 -07:00
|
|
|
{
|
2022-01-27 10:31:23 -08:00
|
|
|
[HLSL_SAMPLER_DIM_1D] = "1D",
|
|
|
|
[HLSL_SAMPLER_DIM_2D] = "2D",
|
|
|
|
[HLSL_SAMPLER_DIM_3D] = "3D",
|
|
|
|
[HLSL_SAMPLER_DIM_CUBE] = "Cube",
|
|
|
|
[HLSL_SAMPLER_DIM_1DARRAY] = "1DArray",
|
|
|
|
[HLSL_SAMPLER_DIM_2DARRAY] = "2DArray",
|
|
|
|
[HLSL_SAMPLER_DIM_2DMS] = "2DMS",
|
|
|
|
[HLSL_SAMPLER_DIM_2DMSARRAY] = "2DMSArray",
|
|
|
|
[HLSL_SAMPLER_DIM_CUBEARRAY] = "CubeArray",
|
2021-10-07 19:58:56 -07:00
|
|
|
};
|
|
|
|
|
|
|
|
switch (type->base_type)
|
|
|
|
{
|
|
|
|
case HLSL_TYPE_TEXTURE:
|
|
|
|
if (type->sampler_dim == HLSL_SAMPLER_DIM_GENERIC)
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(string, "Texture");
|
|
|
|
return string;
|
|
|
|
}
|
|
|
|
|
|
|
|
assert(type->sampler_dim < ARRAY_SIZE(dimensions));
|
|
|
|
assert(type->e.resource_format->base_type < ARRAY_SIZE(base_types));
|
2023-05-02 18:47:05 -07:00
|
|
|
vkd3d_string_buffer_printf(string, "Texture%s", dimensions[type->sampler_dim]);
|
|
|
|
if ((inner_string = hlsl_type_to_string(ctx, type->e.resource_format)))
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(string, "<%s>", inner_string->buffer);
|
|
|
|
hlsl_release_string_buffer(ctx, inner_string);
|
|
|
|
}
|
2021-10-07 19:58:56 -07:00
|
|
|
return string;
|
|
|
|
|
2021-08-12 19:03:26 -07:00
|
|
|
case HLSL_TYPE_UAV:
|
2023-05-02 18:47:05 -07:00
|
|
|
if (type->sampler_dim == HLSL_SAMPLER_DIM_BUFFER)
|
|
|
|
vkd3d_string_buffer_printf(string, "RWBuffer");
|
2023-05-16 03:55:31 -07:00
|
|
|
else if (type->sampler_dim == HLSL_SAMPLER_DIM_STRUCTURED_BUFFER)
|
|
|
|
vkd3d_string_buffer_printf(string, "RWStructuredBuffer");
|
2023-05-02 18:47:05 -07:00
|
|
|
else
|
|
|
|
vkd3d_string_buffer_printf(string, "RWTexture%s", dimensions[type->sampler_dim]);
|
|
|
|
if ((inner_string = hlsl_type_to_string(ctx, type->e.resource_format)))
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(string, "<%s>", inner_string->buffer);
|
|
|
|
hlsl_release_string_buffer(ctx, inner_string);
|
|
|
|
}
|
2021-08-12 19:03:26 -07:00
|
|
|
return string;
|
|
|
|
|
2021-10-07 19:58:56 -07:00
|
|
|
default:
|
|
|
|
vkd3d_string_buffer_printf(string, "<unexpected type>");
|
|
|
|
return string;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-02-12 12:38:50 -08:00
|
|
|
default:
|
2021-02-27 16:03:09 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "<unexpected type>");
|
|
|
|
return string;
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
2021-02-12 12:38:50 -08:00
|
|
|
}
|
2021-01-27 08:29:44 -08:00
|
|
|
|
2023-05-29 16:51:10 -07:00
|
|
|
struct vkd3d_string_buffer *hlsl_component_to_string(struct hlsl_ctx *ctx, const struct hlsl_ir_var *var,
|
|
|
|
unsigned int index)
|
|
|
|
{
|
|
|
|
struct hlsl_type *type = var->data_type, *current_type;
|
|
|
|
struct vkd3d_string_buffer *buffer;
|
|
|
|
unsigned int element_index;
|
|
|
|
|
|
|
|
if (!(buffer = hlsl_get_string_buffer(ctx)))
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
vkd3d_string_buffer_printf(buffer, "%s", var->name);
|
|
|
|
|
|
|
|
while (!type_is_single_component(type))
|
|
|
|
{
|
|
|
|
current_type = type;
|
|
|
|
element_index = traverse_path_from_component_index(ctx, &type, &index);
|
|
|
|
if (current_type->class == HLSL_CLASS_STRUCT)
|
|
|
|
vkd3d_string_buffer_printf(buffer, ".%s", current_type->e.record.fields[element_index].name);
|
|
|
|
else
|
|
|
|
vkd3d_string_buffer_printf(buffer, "[%u]", element_index);
|
|
|
|
}
|
|
|
|
|
|
|
|
return buffer;
|
|
|
|
}
|
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
const char *debug_hlsl_type(struct hlsl_ctx *ctx, const struct hlsl_type *type)
|
2021-02-12 12:38:50 -08:00
|
|
|
{
|
2021-02-27 16:03:09 -08:00
|
|
|
struct vkd3d_string_buffer *string;
|
2021-02-12 12:38:50 -08:00
|
|
|
const char *ret;
|
2021-01-27 08:29:44 -08:00
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
if (!(string = hlsl_type_to_string(ctx, type)))
|
2021-02-12 12:38:50 -08:00
|
|
|
return NULL;
|
2021-02-27 16:03:09 -08:00
|
|
|
ret = vkd3d_dbg_sprintf("%s", string->buffer);
|
2021-05-20 22:32:22 -07:00
|
|
|
hlsl_release_string_buffer(ctx, string);
|
2021-02-12 12:38:50 -08:00
|
|
|
return ret;
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
struct vkd3d_string_buffer *hlsl_modifiers_to_string(struct hlsl_ctx *ctx, unsigned int modifiers)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2021-02-27 16:03:10 -08:00
|
|
|
struct vkd3d_string_buffer *string;
|
2021-02-12 12:38:49 -08:00
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
if (!(string = hlsl_get_string_buffer(ctx)))
|
2021-02-12 12:38:49 -08:00
|
|
|
return NULL;
|
2021-01-27 08:29:44 -08:00
|
|
|
|
|
|
|
if (modifiers & HLSL_STORAGE_EXTERN)
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "extern ");
|
2021-01-27 08:29:44 -08:00
|
|
|
if (modifiers & HLSL_STORAGE_NOINTERPOLATION)
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "nointerpolation ");
|
2023-09-21 06:11:26 -07:00
|
|
|
if (modifiers & HLSL_STORAGE_CENTROID)
|
|
|
|
vkd3d_string_buffer_printf(string, "centroid ");
|
|
|
|
if (modifiers & HLSL_STORAGE_NOPERSPECTIVE)
|
|
|
|
vkd3d_string_buffer_printf(string, "noperspective ");
|
2021-01-27 08:29:44 -08:00
|
|
|
if (modifiers & HLSL_MODIFIER_PRECISE)
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "precise ");
|
2021-01-27 08:29:44 -08:00
|
|
|
if (modifiers & HLSL_STORAGE_SHARED)
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "shared ");
|
2021-01-27 08:29:44 -08:00
|
|
|
if (modifiers & HLSL_STORAGE_GROUPSHARED)
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "groupshared ");
|
2021-01-27 08:29:44 -08:00
|
|
|
if (modifiers & HLSL_STORAGE_STATIC)
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "static ");
|
2021-01-27 08:29:44 -08:00
|
|
|
if (modifiers & HLSL_STORAGE_UNIFORM)
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "uniform ");
|
2022-12-12 15:17:53 -08:00
|
|
|
if (modifiers & HLSL_MODIFIER_VOLATILE)
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "volatile ");
|
2021-01-27 08:29:44 -08:00
|
|
|
if (modifiers & HLSL_MODIFIER_CONST)
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "const ");
|
2021-01-27 08:29:44 -08:00
|
|
|
if (modifiers & HLSL_MODIFIER_ROW_MAJOR)
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "row_major ");
|
2021-01-27 08:29:44 -08:00
|
|
|
if (modifiers & HLSL_MODIFIER_COLUMN_MAJOR)
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "column_major ");
|
2021-01-27 08:29:44 -08:00
|
|
|
if ((modifiers & (HLSL_STORAGE_IN | HLSL_STORAGE_OUT)) == (HLSL_STORAGE_IN | HLSL_STORAGE_OUT))
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "inout ");
|
2021-01-27 08:29:44 -08:00
|
|
|
else if (modifiers & HLSL_STORAGE_IN)
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "in ");
|
2021-01-27 08:29:44 -08:00
|
|
|
else if (modifiers & HLSL_STORAGE_OUT)
|
2021-02-27 16:03:10 -08:00
|
|
|
vkd3d_string_buffer_printf(string, "out ");
|
2021-02-12 12:38:49 -08:00
|
|
|
|
2021-02-27 16:03:10 -08:00
|
|
|
if (string->content_size)
|
|
|
|
string->buffer[--string->content_size] = 0;
|
2021-01-27 08:29:44 -08:00
|
|
|
|
2021-02-12 12:38:49 -08:00
|
|
|
return string;
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
|
2021-02-02 14:11:14 -08:00
|
|
|
const char *hlsl_node_type_to_string(enum hlsl_ir_node_type type)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
static const char * const names[] =
|
|
|
|
{
|
2023-06-12 03:29:35 -07:00
|
|
|
[HLSL_IR_CALL ] = "HLSL_IR_CALL",
|
|
|
|
[HLSL_IR_CONSTANT ] = "HLSL_IR_CONSTANT",
|
|
|
|
[HLSL_IR_EXPR ] = "HLSL_IR_EXPR",
|
|
|
|
[HLSL_IR_IF ] = "HLSL_IR_IF",
|
|
|
|
[HLSL_IR_INDEX ] = "HLSL_IR_INDEX",
|
|
|
|
[HLSL_IR_LOAD ] = "HLSL_IR_LOAD",
|
|
|
|
[HLSL_IR_LOOP ] = "HLSL_IR_LOOP",
|
|
|
|
[HLSL_IR_JUMP ] = "HLSL_IR_JUMP",
|
|
|
|
[HLSL_IR_RESOURCE_LOAD ] = "HLSL_IR_RESOURCE_LOAD",
|
|
|
|
[HLSL_IR_RESOURCE_STORE] = "HLSL_IR_RESOURCE_STORE",
|
|
|
|
[HLSL_IR_STORE ] = "HLSL_IR_STORE",
|
2023-10-11 04:51:51 -07:00
|
|
|
[HLSL_IR_SWITCH ] = "HLSL_IR_SWITCH",
|
2023-06-12 03:29:35 -07:00
|
|
|
[HLSL_IR_SWIZZLE ] = "HLSL_IR_SWIZZLE",
|
2021-01-27 08:29:44 -08:00
|
|
|
};
|
|
|
|
|
|
|
|
if (type >= ARRAY_SIZE(names))
|
|
|
|
return "Unexpected node type";
|
|
|
|
return names[type];
|
|
|
|
}
|
|
|
|
|
2021-09-09 23:53:17 -07:00
|
|
|
const char *hlsl_jump_type_to_string(enum hlsl_ir_jump_type type)
|
|
|
|
{
|
|
|
|
static const char * const names[] =
|
|
|
|
{
|
2023-06-12 03:29:35 -07:00
|
|
|
[HLSL_IR_JUMP_BREAK] = "HLSL_IR_JUMP_BREAK",
|
|
|
|
[HLSL_IR_JUMP_CONTINUE] = "HLSL_IR_JUMP_CONTINUE",
|
2023-06-08 03:42:50 -07:00
|
|
|
[HLSL_IR_JUMP_DISCARD_NEG] = "HLSL_IR_JUMP_DISCARD_NEG",
|
2023-06-12 03:29:35 -07:00
|
|
|
[HLSL_IR_JUMP_DISCARD_NZ] = "HLSL_IR_JUMP_DISCARD_NZ",
|
|
|
|
[HLSL_IR_JUMP_RETURN] = "HLSL_IR_JUMP_RETURN",
|
2021-09-09 23:53:17 -07:00
|
|
|
};
|
|
|
|
|
|
|
|
assert(type < ARRAY_SIZE(names));
|
|
|
|
return names[type];
|
|
|
|
}
|
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
static void dump_instr(struct hlsl_ctx *ctx, struct vkd3d_string_buffer *buffer, const struct hlsl_ir_node *instr);
|
2021-01-27 08:29:44 -08:00
|
|
|
|
2022-12-09 11:45:57 -08:00
|
|
|
static void dump_block(struct hlsl_ctx *ctx, struct vkd3d_string_buffer *buffer, const struct hlsl_block *block)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
struct hlsl_ir_node *instr;
|
|
|
|
|
2022-12-09 11:45:57 -08:00
|
|
|
LIST_FOR_EACH_ENTRY(instr, &block->instrs, struct hlsl_ir_node, entry)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2021-05-20 22:32:22 -07:00
|
|
|
dump_instr(ctx, buffer, instr);
|
2021-01-27 08:29:44 -08:00
|
|
|
vkd3d_string_buffer_printf(buffer, "\n");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-02-02 14:11:15 -08:00
|
|
|
static void dump_src(struct vkd3d_string_buffer *buffer, const struct hlsl_src *src)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
if (src->node->index)
|
|
|
|
vkd3d_string_buffer_printf(buffer, "@%u", src->node->index);
|
|
|
|
else
|
|
|
|
vkd3d_string_buffer_printf(buffer, "%p", src->node);
|
|
|
|
}
|
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
static void dump_ir_var(struct hlsl_ctx *ctx, struct vkd3d_string_buffer *buffer, const struct hlsl_ir_var *var)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2022-11-29 11:10:40 -08:00
|
|
|
if (var->storage_modifiers)
|
2021-04-27 10:14:18 -07:00
|
|
|
{
|
|
|
|
struct vkd3d_string_buffer *string;
|
|
|
|
|
2022-11-29 11:10:40 -08:00
|
|
|
if ((string = hlsl_modifiers_to_string(ctx, var->storage_modifiers)))
|
2021-04-27 10:14:18 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "%s ", string->buffer);
|
2021-05-20 22:32:22 -07:00
|
|
|
hlsl_release_string_buffer(ctx, string);
|
2021-04-27 10:14:18 -07:00
|
|
|
}
|
2021-05-20 22:32:22 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "%s %s", debug_hlsl_type(ctx, var->data_type), var->name);
|
2021-04-27 10:14:19 -07:00
|
|
|
if (var->semantic.name)
|
|
|
|
vkd3d_string_buffer_printf(buffer, " : %s%u", var->semantic.name, var->semantic.index);
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
|
2021-02-02 14:11:15 -08:00
|
|
|
static void dump_deref(struct vkd3d_string_buffer *buffer, const struct hlsl_deref *deref)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2022-07-01 10:41:08 -07:00
|
|
|
unsigned int i;
|
|
|
|
|
2021-11-05 11:35:52 -07:00
|
|
|
if (deref->var)
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, "%s", deref->var->name);
|
vkd3d-shader/hlsl: Split deref-offset into a node and a constant uint.
This uint will be used for the following:
- Since SM4's relative addressing (the capability of passing a register
as an index to another register) only has whole-register granularity,
we will need to make the offset node express the offset in
whole-registers and specify the register component in this uint,
otherwise we would have to add additional / and % operations in the
output binary.
- If, after we apply constant folding and copy propagation, we determine
that the offset is a single constant node, we can store all the offset
in this uint constant, and remove the offset src.
This allows DCE to remove a good bunch of the nodes previously required
only for the offset constants, which makes the output more liteweight
and readable, and simplifies the implementation of relative addressing
when writing tpf in the following patches.
In dump_deref(), we use "c" to indicate components instead of whole
registers. Since now both the offset node and the offset uint are in
components a lowered deref would look like:
var[@42c + 2c]
But, once we express the offset node in whole registers we will remove
the "c" from the node part:
var[@22 + 3c]
2023-10-03 12:47:13 -07:00
|
|
|
if (!hlsl_deref_is_lowered(deref))
|
2022-07-01 10:41:08 -07:00
|
|
|
{
|
vkd3d-shader/hlsl: Split deref-offset into a node and a constant uint.
This uint will be used for the following:
- Since SM4's relative addressing (the capability of passing a register
as an index to another register) only has whole-register granularity,
we will need to make the offset node express the offset in
whole-registers and specify the register component in this uint,
otherwise we would have to add additional / and % operations in the
output binary.
- If, after we apply constant folding and copy propagation, we determine
that the offset is a single constant node, we can store all the offset
in this uint constant, and remove the offset src.
This allows DCE to remove a good bunch of the nodes previously required
only for the offset constants, which makes the output more liteweight
and readable, and simplifies the implementation of relative addressing
when writing tpf in the following patches.
In dump_deref(), we use "c" to indicate components instead of whole
registers. Since now both the offset node and the offset uint are in
components a lowered deref would look like:
var[@42c + 2c]
But, once we express the offset node in whole registers we will remove
the "c" from the node part:
var[@22 + 3c]
2023-10-03 12:47:13 -07:00
|
|
|
if (deref->path_len)
|
2022-07-01 10:41:08 -07:00
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, "[");
|
vkd3d-shader/hlsl: Split deref-offset into a node and a constant uint.
This uint will be used for the following:
- Since SM4's relative addressing (the capability of passing a register
as an index to another register) only has whole-register granularity,
we will need to make the offset node express the offset in
whole-registers and specify the register component in this uint,
otherwise we would have to add additional / and % operations in the
output binary.
- If, after we apply constant folding and copy propagation, we determine
that the offset is a single constant node, we can store all the offset
in this uint constant, and remove the offset src.
This allows DCE to remove a good bunch of the nodes previously required
only for the offset constants, which makes the output more liteweight
and readable, and simplifies the implementation of relative addressing
when writing tpf in the following patches.
In dump_deref(), we use "c" to indicate components instead of whole
registers. Since now both the offset node and the offset uint are in
components a lowered deref would look like:
var[@42c + 2c]
But, once we express the offset node in whole registers we will remove
the "c" from the node part:
var[@22 + 3c]
2023-10-03 12:47:13 -07:00
|
|
|
for (i = 0; i < deref->path_len; ++i)
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, "[");
|
|
|
|
dump_src(buffer, &deref->path[i]);
|
|
|
|
vkd3d_string_buffer_printf(buffer, "]");
|
|
|
|
}
|
2022-07-01 10:41:08 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "]");
|
|
|
|
}
|
|
|
|
}
|
vkd3d-shader/hlsl: Split deref-offset into a node and a constant uint.
This uint will be used for the following:
- Since SM4's relative addressing (the capability of passing a register
as an index to another register) only has whole-register granularity,
we will need to make the offset node express the offset in
whole-registers and specify the register component in this uint,
otherwise we would have to add additional / and % operations in the
output binary.
- If, after we apply constant folding and copy propagation, we determine
that the offset is a single constant node, we can store all the offset
in this uint constant, and remove the offset src.
This allows DCE to remove a good bunch of the nodes previously required
only for the offset constants, which makes the output more liteweight
and readable, and simplifies the implementation of relative addressing
when writing tpf in the following patches.
In dump_deref(), we use "c" to indicate components instead of whole
registers. Since now both the offset node and the offset uint are in
components a lowered deref would look like:
var[@42c + 2c]
But, once we express the offset node in whole registers we will remove
the "c" from the node part:
var[@22 + 3c]
2023-10-03 12:47:13 -07:00
|
|
|
else
|
2021-11-05 11:35:52 -07:00
|
|
|
{
|
vkd3d-shader/hlsl: Split deref-offset into a node and a constant uint.
This uint will be used for the following:
- Since SM4's relative addressing (the capability of passing a register
as an index to another register) only has whole-register granularity,
we will need to make the offset node express the offset in
whole-registers and specify the register component in this uint,
otherwise we would have to add additional / and % operations in the
output binary.
- If, after we apply constant folding and copy propagation, we determine
that the offset is a single constant node, we can store all the offset
in this uint constant, and remove the offset src.
This allows DCE to remove a good bunch of the nodes previously required
only for the offset constants, which makes the output more liteweight
and readable, and simplifies the implementation of relative addressing
when writing tpf in the following patches.
In dump_deref(), we use "c" to indicate components instead of whole
registers. Since now both the offset node and the offset uint are in
components a lowered deref would look like:
var[@42c + 2c]
But, once we express the offset node in whole registers we will remove
the "c" from the node part:
var[@22 + 3c]
2023-10-03 12:47:13 -07:00
|
|
|
bool show_rel, show_const;
|
|
|
|
|
2023-10-06 09:56:24 -07:00
|
|
|
show_rel = deref->rel_offset.node;
|
vkd3d-shader/hlsl: Split deref-offset into a node and a constant uint.
This uint will be used for the following:
- Since SM4's relative addressing (the capability of passing a register
as an index to another register) only has whole-register granularity,
we will need to make the offset node express the offset in
whole-registers and specify the register component in this uint,
otherwise we would have to add additional / and % operations in the
output binary.
- If, after we apply constant folding and copy propagation, we determine
that the offset is a single constant node, we can store all the offset
in this uint constant, and remove the offset src.
This allows DCE to remove a good bunch of the nodes previously required
only for the offset constants, which makes the output more liteweight
and readable, and simplifies the implementation of relative addressing
when writing tpf in the following patches.
In dump_deref(), we use "c" to indicate components instead of whole
registers. Since now both the offset node and the offset uint are in
components a lowered deref would look like:
var[@42c + 2c]
But, once we express the offset node in whole registers we will remove
the "c" from the node part:
var[@22 + 3c]
2023-10-03 12:47:13 -07:00
|
|
|
show_const = deref->const_offset != 0 || !show_rel;
|
|
|
|
|
2021-11-05 11:35:52 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "[");
|
vkd3d-shader/hlsl: Split deref-offset into a node and a constant uint.
This uint will be used for the following:
- Since SM4's relative addressing (the capability of passing a register
as an index to another register) only has whole-register granularity,
we will need to make the offset node express the offset in
whole-registers and specify the register component in this uint,
otherwise we would have to add additional / and % operations in the
output binary.
- If, after we apply constant folding and copy propagation, we determine
that the offset is a single constant node, we can store all the offset
in this uint constant, and remove the offset src.
This allows DCE to remove a good bunch of the nodes previously required
only for the offset constants, which makes the output more liteweight
and readable, and simplifies the implementation of relative addressing
when writing tpf in the following patches.
In dump_deref(), we use "c" to indicate components instead of whole
registers. Since now both the offset node and the offset uint are in
components a lowered deref would look like:
var[@42c + 2c]
But, once we express the offset node in whole registers we will remove
the "c" from the node part:
var[@22 + 3c]
2023-10-03 12:47:13 -07:00
|
|
|
if (show_rel)
|
2023-10-06 09:56:24 -07:00
|
|
|
dump_src(buffer, &deref->rel_offset);
|
vkd3d-shader/hlsl: Split deref-offset into a node and a constant uint.
This uint will be used for the following:
- Since SM4's relative addressing (the capability of passing a register
as an index to another register) only has whole-register granularity,
we will need to make the offset node express the offset in
whole-registers and specify the register component in this uint,
otherwise we would have to add additional / and % operations in the
output binary.
- If, after we apply constant folding and copy propagation, we determine
that the offset is a single constant node, we can store all the offset
in this uint constant, and remove the offset src.
This allows DCE to remove a good bunch of the nodes previously required
only for the offset constants, which makes the output more liteweight
and readable, and simplifies the implementation of relative addressing
when writing tpf in the following patches.
In dump_deref(), we use "c" to indicate components instead of whole
registers. Since now both the offset node and the offset uint are in
components a lowered deref would look like:
var[@42c + 2c]
But, once we express the offset node in whole registers we will remove
the "c" from the node part:
var[@22 + 3c]
2023-10-03 12:47:13 -07:00
|
|
|
if (show_rel && show_const)
|
|
|
|
vkd3d_string_buffer_printf(buffer, " + ");
|
|
|
|
if (show_const)
|
|
|
|
vkd3d_string_buffer_printf(buffer, "%uc", deref->const_offset);
|
2021-11-05 11:35:52 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "]");
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2021-11-05 11:35:52 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "(nil)");
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-04-08 21:38:23 -07:00
|
|
|
const char *debug_hlsl_writemask(unsigned int writemask)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
static const char components[] = {'x', 'y', 'z', 'w'};
|
|
|
|
char string[5];
|
|
|
|
unsigned int i = 0, pos = 0;
|
|
|
|
|
|
|
|
assert(!(writemask & ~VKD3DSP_WRITEMASK_ALL));
|
|
|
|
|
|
|
|
while (writemask)
|
|
|
|
{
|
|
|
|
if (writemask & 1)
|
|
|
|
string[pos++] = components[i];
|
|
|
|
writemask >>= 1;
|
|
|
|
i++;
|
|
|
|
}
|
|
|
|
string[pos] = '\0';
|
|
|
|
return vkd3d_dbg_sprintf(".%s", string);
|
|
|
|
}
|
|
|
|
|
2021-12-01 08:14:50 -08:00
|
|
|
/* Return a printable representation of a vector swizzle of the given
 * size, e.g. ".xyzw". The returned string lives in vkd3d's debug ring
 * buffer. */
const char *debug_hlsl_swizzle(unsigned int swizzle, unsigned int size)
{
    static const char components[] = {'x', 'y', 'z', 'w'};
    char string[5];
    unsigned int component;

    assert(size <= ARRAY_SIZE(components));

    for (component = 0; component < size; ++component)
        string[component] = components[hlsl_swizzle_get_component(swizzle, component)];
    string[size] = 0;

    return vkd3d_dbg_sprintf(".%s", string);
}
|
|
|
|
|
2021-09-09 19:06:38 -07:00
|
|
|
static void dump_ir_call(struct hlsl_ctx *ctx, struct vkd3d_string_buffer *buffer, const struct hlsl_ir_call *call)
|
|
|
|
{
|
|
|
|
const struct hlsl_ir_function_decl *decl = call->decl;
|
|
|
|
struct vkd3d_string_buffer *string;
|
2023-01-31 17:27:01 -08:00
|
|
|
size_t i;
|
2021-09-09 19:06:38 -07:00
|
|
|
|
|
|
|
if (!(string = hlsl_type_to_string(ctx, decl->return_type)))
|
|
|
|
return;
|
|
|
|
|
|
|
|
vkd3d_string_buffer_printf(buffer, "call %s %s(", string->buffer, decl->func->name);
|
|
|
|
hlsl_release_string_buffer(ctx, string);
|
|
|
|
|
2023-01-31 17:27:01 -08:00
|
|
|
for (i = 0; i < decl->parameters.count; ++i)
|
2021-09-09 19:06:38 -07:00
|
|
|
{
|
2023-01-31 17:27:01 -08:00
|
|
|
const struct hlsl_ir_var *param = decl->parameters.vars[i];
|
|
|
|
|
2021-09-09 19:06:38 -07:00
|
|
|
if (!(string = hlsl_type_to_string(ctx, param->data_type)))
|
|
|
|
return;
|
|
|
|
|
2023-01-31 17:27:01 -08:00
|
|
|
if (i)
|
2021-09-09 19:06:38 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, ", ");
|
2023-01-31 17:27:01 -08:00
|
|
|
vkd3d_string_buffer_printf(buffer, "%s", string->buffer);
|
2021-09-09 19:06:38 -07:00
|
|
|
|
|
|
|
hlsl_release_string_buffer(ctx, string);
|
|
|
|
}
|
|
|
|
vkd3d_string_buffer_printf(buffer, ")");
|
|
|
|
}
|
|
|
|
|
2021-02-02 14:11:15 -08:00
|
|
|
static void dump_ir_constant(struct vkd3d_string_buffer *buffer, const struct hlsl_ir_constant *constant)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
struct hlsl_type *type = constant->node.data_type;
|
|
|
|
unsigned int x;
|
|
|
|
|
|
|
|
if (type->dimx != 1)
|
|
|
|
vkd3d_string_buffer_printf(buffer, "{");
|
|
|
|
for (x = 0; x < type->dimx; ++x)
|
|
|
|
{
|
2022-11-11 16:39:55 -08:00
|
|
|
const union hlsl_constant_value_component *value = &constant->value.u[x];
|
2021-09-20 14:40:10 -07:00
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
switch (type->base_type)
|
|
|
|
{
|
|
|
|
case HLSL_TYPE_BOOL:
|
2022-04-05 03:33:12 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "%s ", value->u ? "true" : "false");
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_TYPE_DOUBLE:
|
2021-09-20 14:40:10 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "%.16e ", value->d);
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_TYPE_FLOAT:
|
2022-08-02 08:11:15 -07:00
|
|
|
case HLSL_TYPE_HALF:
|
2021-09-20 14:40:10 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "%.8e ", value->f);
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_TYPE_INT:
|
2021-09-20 14:40:10 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "%d ", value->i);
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_TYPE_UINT:
|
2021-09-20 14:40:10 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "%u ", value->u);
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
default:
|
2022-08-31 04:25:24 -07:00
|
|
|
vkd3d_unreachable();
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
if (type->dimx != 1)
|
|
|
|
vkd3d_string_buffer_printf(buffer, "}");
|
|
|
|
}
|
|
|
|
|
2021-08-13 07:03:24 -07:00
|
|
|
const char *debug_hlsl_expr_op(enum hlsl_ir_expr_op op)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2021-08-12 17:36:12 -07:00
|
|
|
static const char *const op_names[] =
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2021-09-09 19:06:38 -07:00
|
|
|
[HLSL_OP0_VOID] = "void",
|
|
|
|
|
2021-08-12 17:36:13 -07:00
|
|
|
[HLSL_OP1_ABS] = "abs",
|
|
|
|
[HLSL_OP1_BIT_NOT] = "~",
|
|
|
|
[HLSL_OP1_CAST] = "cast",
|
2023-10-17 14:04:08 -07:00
|
|
|
[HLSL_OP1_CEIL] = "ceil",
|
2021-08-12 17:36:13 -07:00
|
|
|
[HLSL_OP1_COS] = "cos",
|
|
|
|
[HLSL_OP1_COS_REDUCED] = "cos_reduced",
|
|
|
|
[HLSL_OP1_DSX] = "dsx",
|
2023-06-19 12:29:58 -07:00
|
|
|
[HLSL_OP1_DSX_COARSE] = "dsx_coarse",
|
2023-06-19 13:05:36 -07:00
|
|
|
[HLSL_OP1_DSX_FINE] = "dsx_fine",
|
2021-08-12 17:36:13 -07:00
|
|
|
[HLSL_OP1_DSY] = "dsy",
|
2023-06-19 12:29:58 -07:00
|
|
|
[HLSL_OP1_DSY_COARSE] = "dsy_coarse",
|
2023-06-19 13:05:36 -07:00
|
|
|
[HLSL_OP1_DSY_FINE] = "dsy_fine",
|
2021-08-12 17:36:13 -07:00
|
|
|
[HLSL_OP1_EXP2] = "exp2",
|
2023-10-17 15:01:51 -07:00
|
|
|
[HLSL_OP1_FLOOR] = "floor",
|
2021-08-12 17:36:13 -07:00
|
|
|
[HLSL_OP1_FRACT] = "fract",
|
|
|
|
[HLSL_OP1_LOG2] = "log2",
|
|
|
|
[HLSL_OP1_LOGIC_NOT] = "!",
|
|
|
|
[HLSL_OP1_NEG] = "-",
|
|
|
|
[HLSL_OP1_NRM] = "nrm",
|
|
|
|
[HLSL_OP1_RCP] = "rcp",
|
2022-10-09 20:59:12 -07:00
|
|
|
[HLSL_OP1_REINTERPRET] = "reinterpret",
|
2021-11-19 06:38:41 -08:00
|
|
|
[HLSL_OP1_ROUND] = "round",
|
2021-08-12 17:36:13 -07:00
|
|
|
[HLSL_OP1_RSQ] = "rsq",
|
|
|
|
[HLSL_OP1_SAT] = "sat",
|
|
|
|
[HLSL_OP1_SIGN] = "sign",
|
|
|
|
[HLSL_OP1_SIN] = "sin",
|
|
|
|
[HLSL_OP1_SIN_REDUCED] = "sin_reduced",
|
|
|
|
[HLSL_OP1_SQRT] = "sqrt",
|
2023-04-14 14:00:57 -07:00
|
|
|
[HLSL_OP1_TRUNC] = "trunc",
|
2021-08-12 17:36:13 -07:00
|
|
|
|
|
|
|
[HLSL_OP2_ADD] = "+",
|
|
|
|
[HLSL_OP2_BIT_AND] = "&",
|
|
|
|
[HLSL_OP2_BIT_OR] = "|",
|
|
|
|
[HLSL_OP2_BIT_XOR] = "^",
|
|
|
|
[HLSL_OP2_CRS] = "crs",
|
|
|
|
[HLSL_OP2_DIV] = "/",
|
|
|
|
[HLSL_OP2_DOT] = "dot",
|
|
|
|
[HLSL_OP2_EQUAL] = "==",
|
|
|
|
[HLSL_OP2_GEQUAL] = ">=",
|
|
|
|
[HLSL_OP2_LESS] = "<",
|
|
|
|
[HLSL_OP2_LOGIC_AND] = "&&",
|
|
|
|
[HLSL_OP2_LOGIC_OR] = "||",
|
|
|
|
[HLSL_OP2_LSHIFT] = "<<",
|
|
|
|
[HLSL_OP2_MAX] = "max",
|
|
|
|
[HLSL_OP2_MIN] = "min",
|
|
|
|
[HLSL_OP2_MOD] = "%",
|
|
|
|
[HLSL_OP2_MUL] = "*",
|
|
|
|
[HLSL_OP2_NEQUAL] = "!=",
|
|
|
|
[HLSL_OP2_RSHIFT] = ">>",
|
|
|
|
|
2023-09-26 11:48:58 -07:00
|
|
|
[HLSL_OP3_CMP] = "cmp",
|
2023-01-26 12:56:00 -08:00
|
|
|
[HLSL_OP3_DP2ADD] = "dp2add",
|
2023-07-24 23:46:28 -07:00
|
|
|
[HLSL_OP3_MOVC] = "movc",
|
|
|
|
[HLSL_OP3_TERNARY] = "ternary",
|
2021-01-27 08:29:44 -08:00
|
|
|
};
|
|
|
|
|
2021-08-13 07:03:24 -07:00
|
|
|
return op_names[op];
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
|
2021-02-02 14:11:15 -08:00
|
|
|
static void dump_ir_expr(struct vkd3d_string_buffer *buffer, const struct hlsl_ir_expr *expr)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
unsigned int i;
|
|
|
|
|
2021-08-13 07:03:24 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "%s (", debug_hlsl_expr_op(expr->op));
|
2021-09-27 18:51:44 -07:00
|
|
|
for (i = 0; i < HLSL_MAX_OPERANDS && expr->operands[i].node; ++i)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2021-02-02 14:11:15 -08:00
|
|
|
dump_src(buffer, &expr->operands[i]);
|
2021-01-27 08:29:44 -08:00
|
|
|
vkd3d_string_buffer_printf(buffer, " ");
|
|
|
|
}
|
|
|
|
vkd3d_string_buffer_printf(buffer, ")");
|
|
|
|
}
|
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
static void dump_ir_if(struct hlsl_ctx *ctx, struct vkd3d_string_buffer *buffer, const struct hlsl_ir_if *if_node)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, "if (");
|
2021-02-02 14:11:15 -08:00
|
|
|
dump_src(buffer, &if_node->condition);
|
2021-11-05 11:35:56 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, ") {\n");
|
2022-12-09 11:45:57 -08:00
|
|
|
dump_block(ctx, buffer, &if_node->then_block);
|
2021-11-05 11:35:56 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, " %10s } else {\n", "");
|
2022-12-09 11:45:57 -08:00
|
|
|
dump_block(ctx, buffer, &if_node->else_block);
|
2021-11-05 11:35:56 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, " %10s }", "");
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
|
2021-02-02 14:11:15 -08:00
|
|
|
static void dump_ir_jump(struct vkd3d_string_buffer *buffer, const struct hlsl_ir_jump *jump)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
switch (jump->type)
|
|
|
|
{
|
|
|
|
case HLSL_IR_JUMP_BREAK:
|
|
|
|
vkd3d_string_buffer_printf(buffer, "break");
|
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_IR_JUMP_CONTINUE:
|
|
|
|
vkd3d_string_buffer_printf(buffer, "continue");
|
|
|
|
break;
|
|
|
|
|
2023-06-08 03:42:50 -07:00
|
|
|
case HLSL_IR_JUMP_DISCARD_NEG:
|
|
|
|
vkd3d_string_buffer_printf(buffer, "discard_neg");
|
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_IR_JUMP_DISCARD_NZ:
|
|
|
|
vkd3d_string_buffer_printf(buffer, "discard_nz");
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_IR_JUMP_RETURN:
|
|
|
|
vkd3d_string_buffer_printf(buffer, "return");
|
|
|
|
break;
|
2023-09-25 11:29:54 -07:00
|
|
|
|
|
|
|
case HLSL_IR_JUMP_UNRESOLVED_CONTINUE:
|
|
|
|
vkd3d_string_buffer_printf(buffer, "unresolved_continue");
|
|
|
|
break;
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
static void dump_ir_loop(struct hlsl_ctx *ctx, struct vkd3d_string_buffer *buffer, const struct hlsl_ir_loop *loop)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2021-11-05 11:35:56 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "for (;;) {\n");
|
2022-12-09 11:45:57 -08:00
|
|
|
dump_block(ctx, buffer, &loop->body);
|
2021-11-05 11:35:56 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, " %10s }", "");
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
|
2021-10-07 19:58:57 -07:00
|
|
|
static void dump_ir_resource_load(struct vkd3d_string_buffer *buffer, const struct hlsl_ir_resource_load *load)
|
|
|
|
{
|
|
|
|
static const char *const type_names[] =
|
|
|
|
{
|
|
|
|
[HLSL_RESOURCE_LOAD] = "load_resource",
|
2021-11-05 11:35:52 -07:00
|
|
|
[HLSL_RESOURCE_SAMPLE] = "sample",
|
2023-05-16 11:54:22 -07:00
|
|
|
[HLSL_RESOURCE_SAMPLE_CMP] = "sample_cmp",
|
2023-05-26 02:14:07 -07:00
|
|
|
[HLSL_RESOURCE_SAMPLE_CMP_LZ] = "sample_cmp_lz",
|
2021-08-16 18:28:47 -07:00
|
|
|
[HLSL_RESOURCE_SAMPLE_LOD] = "sample_lod",
|
2023-04-19 10:24:14 -07:00
|
|
|
[HLSL_RESOURCE_SAMPLE_LOD_BIAS] = "sample_biased",
|
2023-05-05 08:13:18 -07:00
|
|
|
[HLSL_RESOURCE_SAMPLE_GRAD] = "sample_grad",
|
2022-01-26 06:35:32 -08:00
|
|
|
[HLSL_RESOURCE_GATHER_RED] = "gather_red",
|
|
|
|
[HLSL_RESOURCE_GATHER_GREEN] = "gather_green",
|
|
|
|
[HLSL_RESOURCE_GATHER_BLUE] = "gather_blue",
|
|
|
|
[HLSL_RESOURCE_GATHER_ALPHA] = "gather_alpha",
|
2023-06-07 10:56:02 -07:00
|
|
|
[HLSL_RESOURCE_SAMPLE_INFO] = "sample_info",
|
|
|
|
[HLSL_RESOURCE_RESINFO] = "resinfo",
|
2021-10-07 19:58:57 -07:00
|
|
|
};
|
|
|
|
|
2022-01-26 06:35:32 -08:00
|
|
|
assert(load->load_type < ARRAY_SIZE(type_names));
|
2021-10-07 19:58:57 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "%s(resource = ", type_names[load->load_type]);
|
|
|
|
dump_deref(buffer, &load->resource);
|
2021-11-05 11:35:52 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, ", sampler = ");
|
|
|
|
dump_deref(buffer, &load->sampler);
|
2023-06-07 10:56:02 -07:00
|
|
|
if (load->coords.node)
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, ", coords = ");
|
|
|
|
dump_src(buffer, &load->coords);
|
|
|
|
}
|
2023-04-27 01:15:36 -07:00
|
|
|
if (load->sample_index.node)
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, ", sample index = ");
|
|
|
|
dump_src(buffer, &load->sample_index);
|
|
|
|
}
|
2022-01-26 06:35:29 -08:00
|
|
|
if (load->texel_offset.node)
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, ", offset = ");
|
|
|
|
dump_src(buffer, &load->texel_offset);
|
|
|
|
}
|
2021-08-16 18:28:47 -07:00
|
|
|
if (load->lod.node)
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, ", lod = ");
|
|
|
|
dump_src(buffer, &load->lod);
|
|
|
|
}
|
2023-05-05 08:13:18 -07:00
|
|
|
if (load->ddx.node)
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, ", ddx = ");
|
|
|
|
dump_src(buffer, &load->ddx);
|
|
|
|
}
|
|
|
|
if (load->ddy.node)
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, ", ddy = ");
|
|
|
|
dump_src(buffer, &load->ddy);
|
|
|
|
}
|
2023-05-16 11:54:22 -07:00
|
|
|
if (load->cmp.node)
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, ", cmp = ");
|
|
|
|
dump_src(buffer, &load->cmp);
|
|
|
|
}
|
2021-10-07 19:58:57 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, ")");
|
|
|
|
}
|
|
|
|
|
2021-08-15 10:08:32 -07:00
|
|
|
static void dump_ir_resource_store(struct vkd3d_string_buffer *buffer, const struct hlsl_ir_resource_store *store)
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, "store_resource(resource = ");
|
|
|
|
dump_deref(buffer, &store->resource);
|
|
|
|
vkd3d_string_buffer_printf(buffer, ", coords = ");
|
|
|
|
dump_src(buffer, &store->coords);
|
|
|
|
vkd3d_string_buffer_printf(buffer, ", value = ");
|
|
|
|
dump_src(buffer, &store->value);
|
|
|
|
vkd3d_string_buffer_printf(buffer, ")");
|
|
|
|
}
|
|
|
|
|
2021-04-08 21:38:22 -07:00
|
|
|
static void dump_ir_store(struct vkd3d_string_buffer *buffer, const struct hlsl_ir_store *store)
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, "= (");
|
|
|
|
dump_deref(buffer, &store->lhs);
|
|
|
|
if (store->writemask != VKD3DSP_WRITEMASK_ALL)
|
2021-04-08 21:38:23 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "%s", debug_hlsl_writemask(store->writemask));
|
2021-04-08 21:38:22 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, " ");
|
|
|
|
dump_src(buffer, &store->rhs);
|
|
|
|
vkd3d_string_buffer_printf(buffer, ")");
|
|
|
|
}
|
|
|
|
|
2021-02-02 14:11:15 -08:00
|
|
|
static void dump_ir_swizzle(struct vkd3d_string_buffer *buffer, const struct hlsl_ir_swizzle *swizzle)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
unsigned int i;
|
|
|
|
|
2021-02-02 14:11:15 -08:00
|
|
|
dump_src(buffer, &swizzle->val);
|
2021-01-27 08:29:44 -08:00
|
|
|
if (swizzle->val.node->data_type->dimy > 1)
|
|
|
|
{
|
2021-12-01 08:14:50 -08:00
|
|
|
vkd3d_string_buffer_printf(buffer, ".");
|
2021-01-27 08:29:44 -08:00
|
|
|
for (i = 0; i < swizzle->node.data_type->dimx; ++i)
|
|
|
|
vkd3d_string_buffer_printf(buffer, "_m%u%u", (swizzle->swizzle >> i * 8) & 0xf, (swizzle->swizzle >> (i * 8 + 4)) & 0xf);
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
2021-12-01 08:14:50 -08:00
|
|
|
vkd3d_string_buffer_printf(buffer, "%s", debug_hlsl_swizzle(swizzle->swizzle, swizzle->node.data_type->dimx));
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-02-24 11:39:56 -08:00
|
|
|
static void dump_ir_index(struct vkd3d_string_buffer *buffer, const struct hlsl_ir_index *index)
|
|
|
|
{
|
|
|
|
dump_src(buffer, &index->val);
|
|
|
|
vkd3d_string_buffer_printf(buffer, "[idx:");
|
|
|
|
dump_src(buffer, &index->idx);
|
|
|
|
vkd3d_string_buffer_printf(buffer, "]");
|
|
|
|
}
|
|
|
|
|
2023-10-11 04:51:51 -07:00
|
|
|
static void dump_ir_switch(struct hlsl_ctx *ctx, struct vkd3d_string_buffer *buffer, const struct hlsl_ir_switch *s)
|
|
|
|
{
|
|
|
|
struct hlsl_ir_switch_case *c;
|
|
|
|
|
|
|
|
vkd3d_string_buffer_printf(buffer, "switch (");
|
|
|
|
dump_src(buffer, &s->selector);
|
|
|
|
vkd3d_string_buffer_printf(buffer, ") {\n");
|
|
|
|
|
|
|
|
LIST_FOR_EACH_ENTRY(c, &s->cases, struct hlsl_ir_switch_case, entry)
|
|
|
|
{
|
|
|
|
if (c->is_default)
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, " %10s default: {\n", "");
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
vkd3d_string_buffer_printf(buffer, " %10s case %u : {\n", "", c->value);
|
|
|
|
}
|
|
|
|
|
|
|
|
dump_block(ctx, buffer, &c->body);
|
|
|
|
vkd3d_string_buffer_printf(buffer, " %10s }\n", "");
|
|
|
|
}
|
|
|
|
|
|
|
|
vkd3d_string_buffer_printf(buffer, " %10s }", "");
|
|
|
|
}
|
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
static void dump_instr(struct hlsl_ctx *ctx, struct vkd3d_string_buffer *buffer, const struct hlsl_ir_node *instr)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
if (instr->index)
|
|
|
|
vkd3d_string_buffer_printf(buffer, "%4u: ", instr->index);
|
|
|
|
else
|
|
|
|
vkd3d_string_buffer_printf(buffer, "%p: ", instr);
|
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
vkd3d_string_buffer_printf(buffer, "%10s | ", instr->data_type ? debug_hlsl_type(ctx, instr->data_type) : "");
|
2021-01-27 08:29:44 -08:00
|
|
|
|
|
|
|
switch (instr->type)
|
|
|
|
{
|
2021-09-09 19:06:38 -07:00
|
|
|
case HLSL_IR_CALL:
|
|
|
|
dump_ir_call(ctx, buffer, hlsl_ir_call(instr));
|
|
|
|
break;
|
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
case HLSL_IR_CONSTANT:
|
2021-02-02 14:11:15 -08:00
|
|
|
dump_ir_constant(buffer, hlsl_ir_constant(instr));
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_IR_EXPR:
|
2021-02-02 14:11:15 -08:00
|
|
|
dump_ir_expr(buffer, hlsl_ir_expr(instr));
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_IR_IF:
|
2021-05-20 22:32:22 -07:00
|
|
|
dump_ir_if(ctx, buffer, hlsl_ir_if(instr));
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
2023-02-24 11:39:56 -08:00
|
|
|
case HLSL_IR_INDEX:
|
|
|
|
dump_ir_index(buffer, hlsl_ir_index(instr));
|
|
|
|
break;
|
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
case HLSL_IR_JUMP:
|
2021-02-02 14:11:15 -08:00
|
|
|
dump_ir_jump(buffer, hlsl_ir_jump(instr));
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_IR_LOAD:
|
2021-02-02 14:11:15 -08:00
|
|
|
dump_deref(buffer, &hlsl_ir_load(instr)->src);
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_IR_LOOP:
|
2021-05-20 22:32:22 -07:00
|
|
|
dump_ir_loop(ctx, buffer, hlsl_ir_loop(instr));
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
2021-10-07 19:58:57 -07:00
|
|
|
case HLSL_IR_RESOURCE_LOAD:
|
|
|
|
dump_ir_resource_load(buffer, hlsl_ir_resource_load(instr));
|
|
|
|
break;
|
|
|
|
|
2021-08-15 10:08:32 -07:00
|
|
|
case HLSL_IR_RESOURCE_STORE:
|
|
|
|
dump_ir_resource_store(buffer, hlsl_ir_resource_store(instr));
|
|
|
|
break;
|
|
|
|
|
2021-04-08 21:38:22 -07:00
|
|
|
case HLSL_IR_STORE:
|
|
|
|
dump_ir_store(buffer, hlsl_ir_store(instr));
|
|
|
|
break;
|
|
|
|
|
2023-10-11 04:51:51 -07:00
|
|
|
case HLSL_IR_SWITCH:
|
|
|
|
dump_ir_switch(ctx, buffer, hlsl_ir_switch(instr));
|
|
|
|
break;
|
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
case HLSL_IR_SWIZZLE:
|
2021-02-02 14:11:15 -08:00
|
|
|
dump_ir_swizzle(buffer, hlsl_ir_swizzle(instr));
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-05-20 22:32:22 -07:00
|
|
|
void hlsl_dump_function(struct hlsl_ctx *ctx, const struct hlsl_ir_function_decl *func)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
struct vkd3d_string_buffer buffer;
|
2023-01-31 17:27:01 -08:00
|
|
|
size_t i;
|
2021-01-27 08:29:44 -08:00
|
|
|
|
|
|
|
vkd3d_string_buffer_init(&buffer);
|
|
|
|
vkd3d_string_buffer_printf(&buffer, "Dumping function %s.\n", func->func->name);
|
|
|
|
vkd3d_string_buffer_printf(&buffer, "Function parameters:\n");
|
2023-01-31 17:27:01 -08:00
|
|
|
for (i = 0; i < func->parameters.count; ++i)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2023-01-31 17:27:01 -08:00
|
|
|
dump_ir_var(ctx, &buffer, func->parameters.vars[i]);
|
2021-01-27 08:29:44 -08:00
|
|
|
vkd3d_string_buffer_printf(&buffer, "\n");
|
|
|
|
}
|
2021-10-15 14:54:09 -07:00
|
|
|
if (func->has_body)
|
2022-12-09 11:45:57 -08:00
|
|
|
dump_block(ctx, &buffer, &func->body);
|
2021-01-27 08:29:44 -08:00
|
|
|
|
|
|
|
vkd3d_string_buffer_trace(&buffer);
|
|
|
|
vkd3d_string_buffer_cleanup(&buffer);
|
|
|
|
}
|
|
|
|
|
2022-02-10 19:48:18 -08:00
|
|
|
void hlsl_replace_node(struct hlsl_ir_node *old, struct hlsl_ir_node *new)
|
|
|
|
{
|
|
|
|
struct hlsl_src *src, *next;
|
|
|
|
|
2023-02-14 14:01:18 -08:00
|
|
|
assert(old->data_type->dimx == new->data_type->dimx);
|
|
|
|
assert(old->data_type->dimy == new->data_type->dimy);
|
|
|
|
|
2022-02-10 19:48:18 -08:00
|
|
|
LIST_FOR_EACH_ENTRY_SAFE(src, next, &old->uses, struct hlsl_src, entry)
|
|
|
|
{
|
|
|
|
hlsl_src_remove(src);
|
|
|
|
hlsl_src_from_node(src, new);
|
|
|
|
}
|
|
|
|
list_remove(&old->entry);
|
|
|
|
hlsl_free_instr(old);
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2021-02-02 14:11:14 -08:00
|
|
|
void hlsl_free_type(struct hlsl_type *type)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2022-07-14 18:23:43 -07:00
|
|
|
struct hlsl_struct_field *field;
|
|
|
|
size_t i;
|
2021-01-27 08:29:44 -08:00
|
|
|
|
|
|
|
vkd3d_free((void *)type->name);
|
2022-11-11 17:31:55 -08:00
|
|
|
if (type->class == HLSL_CLASS_STRUCT)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2022-07-14 18:23:43 -07:00
|
|
|
for (i = 0; i < type->e.record.field_count; ++i)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2022-07-14 18:23:43 -07:00
|
|
|
field = &type->e.record.fields[i];
|
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
vkd3d_free((void *)field->name);
|
2023-01-31 17:44:46 -08:00
|
|
|
hlsl_cleanup_semantic(&field->semantic);
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
2022-07-14 18:23:43 -07:00
|
|
|
vkd3d_free((void *)type->e.record.fields);
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
vkd3d_free(type);
|
|
|
|
}
|
|
|
|
|
2021-02-02 14:11:14 -08:00
|
|
|
void hlsl_free_instr_list(struct list *list)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
struct hlsl_ir_node *node, *next_node;
|
|
|
|
|
|
|
|
if (!list)
|
|
|
|
return;
|
|
|
|
/* Iterate in reverse, to avoid use-after-free when unlinking sources from
|
|
|
|
* the "uses" list. */
|
|
|
|
LIST_FOR_EACH_ENTRY_SAFE_REV(node, next_node, list, struct hlsl_ir_node, entry)
|
2021-02-02 14:11:14 -08:00
|
|
|
hlsl_free_instr(node);
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
|
2023-04-14 11:02:15 -07:00
|
|
|
void hlsl_block_cleanup(struct hlsl_block *block)
|
|
|
|
{
|
|
|
|
hlsl_free_instr_list(&block->instrs);
|
|
|
|
}
|
|
|
|
|
2021-09-09 19:06:38 -07:00
|
|
|
/* Free a call node. The callee declaration is not owned by the call. */
static void free_ir_call(struct hlsl_ir_call *call)
{
    vkd3d_free(call);
}
|
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
/* Free a constant node; its value is stored inline. */
static void free_ir_constant(struct hlsl_ir_constant *constant)
{
    vkd3d_free(constant);
}
|
|
|
|
|
|
|
|
static void free_ir_expr(struct hlsl_ir_expr *expr)
|
|
|
|
{
|
|
|
|
unsigned int i;
|
|
|
|
|
|
|
|
for (i = 0; i < ARRAY_SIZE(expr->operands); ++i)
|
|
|
|
hlsl_src_remove(&expr->operands[i]);
|
|
|
|
vkd3d_free(expr);
|
|
|
|
}
|
|
|
|
|
|
|
|
static void free_ir_if(struct hlsl_ir_if *if_node)
|
|
|
|
{
|
2022-11-10 18:04:22 -08:00
|
|
|
hlsl_block_cleanup(&if_node->then_block);
|
|
|
|
hlsl_block_cleanup(&if_node->else_block);
|
2021-01-27 08:29:44 -08:00
|
|
|
hlsl_src_remove(&if_node->condition);
|
|
|
|
vkd3d_free(if_node);
|
|
|
|
}
|
|
|
|
|
|
|
|
static void free_ir_jump(struct hlsl_ir_jump *jump)
|
|
|
|
{
|
2023-06-08 00:42:58 -07:00
|
|
|
hlsl_src_remove(&jump->condition);
|
2021-01-27 08:29:44 -08:00
|
|
|
vkd3d_free(jump);
|
|
|
|
}
|
|
|
|
|
|
|
|
static void free_ir_load(struct hlsl_ir_load *load)
|
|
|
|
{
|
2022-06-30 15:20:20 -07:00
|
|
|
hlsl_cleanup_deref(&load->src);
|
2021-01-27 08:29:44 -08:00
|
|
|
vkd3d_free(load);
|
|
|
|
}
|
|
|
|
|
|
|
|
static void free_ir_loop(struct hlsl_ir_loop *loop)
|
|
|
|
{
|
2023-04-14 11:02:15 -07:00
|
|
|
hlsl_block_cleanup(&loop->body);
|
2021-01-27 08:29:44 -08:00
|
|
|
vkd3d_free(loop);
|
|
|
|
}
|
|
|
|
|
2021-10-07 19:58:57 -07:00
|
|
|
static void free_ir_resource_load(struct hlsl_ir_resource_load *load)
|
|
|
|
{
|
2022-06-30 15:20:20 -07:00
|
|
|
hlsl_cleanup_deref(&load->sampler);
|
|
|
|
hlsl_cleanup_deref(&load->resource);
|
2021-08-16 18:28:47 -07:00
|
|
|
hlsl_src_remove(&load->coords);
|
|
|
|
hlsl_src_remove(&load->lod);
|
2023-05-05 08:13:18 -07:00
|
|
|
hlsl_src_remove(&load->ddx);
|
|
|
|
hlsl_src_remove(&load->ddy);
|
2023-05-16 11:54:22 -07:00
|
|
|
hlsl_src_remove(&load->cmp);
|
2022-01-26 06:35:29 -08:00
|
|
|
hlsl_src_remove(&load->texel_offset);
|
2023-04-27 01:15:36 -07:00
|
|
|
hlsl_src_remove(&load->sample_index);
|
2021-10-07 19:58:57 -07:00
|
|
|
vkd3d_free(load);
|
|
|
|
}
|
|
|
|
|
2021-08-15 10:08:32 -07:00
|
|
|
static void free_ir_resource_store(struct hlsl_ir_resource_store *store)
|
|
|
|
{
|
2023-10-06 09:56:24 -07:00
|
|
|
hlsl_src_remove(&store->resource.rel_offset);
|
2021-08-15 10:08:32 -07:00
|
|
|
hlsl_src_remove(&store->coords);
|
|
|
|
hlsl_src_remove(&store->value);
|
|
|
|
vkd3d_free(store);
|
|
|
|
}
|
|
|
|
|
2021-04-08 21:38:22 -07:00
|
|
|
static void free_ir_store(struct hlsl_ir_store *store)
|
|
|
|
{
|
|
|
|
hlsl_src_remove(&store->rhs);
|
2022-06-30 15:20:20 -07:00
|
|
|
hlsl_cleanup_deref(&store->lhs);
|
2021-04-08 21:38:22 -07:00
|
|
|
vkd3d_free(store);
|
|
|
|
}
|
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
static void free_ir_swizzle(struct hlsl_ir_swizzle *swizzle)
|
|
|
|
{
|
|
|
|
hlsl_src_remove(&swizzle->val);
|
|
|
|
vkd3d_free(swizzle);
|
|
|
|
}
|
|
|
|
|
2023-10-11 04:51:51 -07:00
|
|
|
static void free_ir_switch(struct hlsl_ir_switch *s)
|
|
|
|
{
|
|
|
|
hlsl_src_remove(&s->selector);
|
|
|
|
hlsl_cleanup_ir_switch_cases(&s->cases);
|
|
|
|
|
|
|
|
vkd3d_free(s);
|
|
|
|
}
|
|
|
|
|
2023-02-24 11:39:56 -08:00
|
|
|
static void free_ir_index(struct hlsl_ir_index *index)
|
|
|
|
{
|
|
|
|
hlsl_src_remove(&index->val);
|
|
|
|
hlsl_src_remove(&index->idx);
|
|
|
|
vkd3d_free(index);
|
|
|
|
}
|
|
|
|
|
2021-02-02 14:11:14 -08:00
|
|
|
void hlsl_free_instr(struct hlsl_ir_node *node)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
2021-08-19 08:29:06 -07:00
|
|
|
assert(list_empty(&node->uses));
|
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
switch (node->type)
|
|
|
|
{
|
2021-09-09 19:06:38 -07:00
|
|
|
case HLSL_IR_CALL:
|
|
|
|
free_ir_call(hlsl_ir_call(node));
|
|
|
|
break;
|
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
case HLSL_IR_CONSTANT:
|
2021-02-02 14:11:14 -08:00
|
|
|
free_ir_constant(hlsl_ir_constant(node));
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_IR_EXPR:
|
2021-02-02 14:11:14 -08:00
|
|
|
free_ir_expr(hlsl_ir_expr(node));
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_IR_IF:
|
2021-02-02 14:11:14 -08:00
|
|
|
free_ir_if(hlsl_ir_if(node));
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
2023-02-24 11:39:56 -08:00
|
|
|
case HLSL_IR_INDEX:
|
|
|
|
free_ir_index(hlsl_ir_index(node));
|
|
|
|
break;
|
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
case HLSL_IR_JUMP:
|
2021-02-02 14:11:14 -08:00
|
|
|
free_ir_jump(hlsl_ir_jump(node));
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_IR_LOAD:
|
2021-02-02 14:11:14 -08:00
|
|
|
free_ir_load(hlsl_ir_load(node));
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
|
|
|
case HLSL_IR_LOOP:
|
2021-02-02 14:11:14 -08:00
|
|
|
free_ir_loop(hlsl_ir_loop(node));
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
|
|
|
|
2021-10-07 19:58:57 -07:00
|
|
|
case HLSL_IR_RESOURCE_LOAD:
|
|
|
|
free_ir_resource_load(hlsl_ir_resource_load(node));
|
|
|
|
break;
|
|
|
|
|
2021-08-15 10:08:32 -07:00
|
|
|
case HLSL_IR_RESOURCE_STORE:
|
|
|
|
free_ir_resource_store(hlsl_ir_resource_store(node));
|
|
|
|
break;
|
|
|
|
|
2021-04-08 21:38:22 -07:00
|
|
|
case HLSL_IR_STORE:
|
|
|
|
free_ir_store(hlsl_ir_store(node));
|
|
|
|
break;
|
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
case HLSL_IR_SWIZZLE:
|
2021-02-02 14:11:14 -08:00
|
|
|
free_ir_swizzle(hlsl_ir_swizzle(node));
|
2021-01-27 08:29:44 -08:00
|
|
|
break;
|
2023-10-11 04:51:51 -07:00
|
|
|
|
|
|
|
case HLSL_IR_SWITCH:
|
|
|
|
free_ir_switch(hlsl_ir_switch(node));
|
|
|
|
break;
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-08-16 12:52:10 -07:00
|
|
|
void hlsl_free_attribute(struct hlsl_attribute *attr)
|
|
|
|
{
|
|
|
|
unsigned int i;
|
|
|
|
|
|
|
|
for (i = 0; i < attr->args_count; ++i)
|
|
|
|
hlsl_src_remove(&attr->args[i]);
|
2022-11-14 18:50:17 -08:00
|
|
|
hlsl_block_cleanup(&attr->instrs);
|
2021-08-16 12:52:10 -07:00
|
|
|
vkd3d_free((void *)attr->name);
|
|
|
|
vkd3d_free(attr);
|
|
|
|
}
|
|
|
|
|
2023-01-31 17:44:46 -08:00
|
|
|
void hlsl_cleanup_semantic(struct hlsl_semantic *semantic)
|
|
|
|
{
|
|
|
|
vkd3d_free((void *)semantic->name);
|
|
|
|
memset(semantic, 0, sizeof(*semantic));
|
|
|
|
}
|
|
|
|
|
2021-01-27 08:29:44 -08:00
|
|
|
static void free_function_decl(struct hlsl_ir_function_decl *decl)
|
|
|
|
{
|
2021-08-16 12:52:10 -07:00
|
|
|
unsigned int i;
|
|
|
|
|
|
|
|
for (i = 0; i < decl->attr_count; ++i)
|
|
|
|
hlsl_free_attribute((void *)decl->attrs[i]);
|
|
|
|
vkd3d_free((void *)decl->attrs);
|
|
|
|
|
2023-01-31 17:27:01 -08:00
|
|
|
vkd3d_free(decl->parameters.vars);
|
2023-04-14 11:02:15 -07:00
|
|
|
hlsl_block_cleanup(&decl->body);
|
2021-01-27 08:29:44 -08:00
|
|
|
vkd3d_free(decl);
|
|
|
|
}
|
|
|
|
|
|
|
|
static void free_function(struct hlsl_ir_function *func)
|
|
|
|
{
|
vkd3d-shader/hlsl: Store function overloads in a list.
The choice to store them in an rbtree was made early on. It does not seem likely
that HLSL programs would define many overloads for any of their functions, but I
suspect the idea was rather that intrinsics would be defined as plain
hlsl_ir_function_decl structures [cf. 447463e5900ca6a636998a65429b8a08a5441657]
and that some intrinsics that could operate on any type would therefore need
many overrides.
This is not how we deal with intrinsics, however. When the first intrinsics were
implemented I made the choice disregard this intended design, and instead match
and convert their types manually, in C. Nothing that has happened in the time
since has led me to question that choice, and in fact, the flexibility with
which we must accommodate functions has led me to believe that matching in this
way was definitely the right choice. The main other designs I see would have
been:
* define each intrinsic variant separately using existing HLSL types. Besides
efficiency concerns (i.e. this would take more space in memory, and would take
longer to generate each variant), the normal type-matching rules don't really
apply to intrinsics.
[For example: elementwise intrinsics like abs() return the same type as the
input, including preserving the distinction between float and float1. It is
legal to define separate HLSL overloads taking float and float1, but trying to
invoke these functions yields an "ambiguous function call" error.]
* introduce new (semi-)generic types. This is far more code and ends up acting
like our current scheme (with helpers) in a slightly more complex form.
So I think we can go ahead and rip out this vestige of the original design for
intrinsics.
As for why to change it: rbtrees are simply more complex to deal with, and it
seems unlikely to me that the difference is going to matter. I do not expect any
program to define large quantities of intrinsics; linked list search should be
good enough.
2023-09-08 14:27:10 -07:00
|
|
|
struct hlsl_ir_function_decl *decl, *next;
|
|
|
|
|
|
|
|
LIST_FOR_EACH_ENTRY_SAFE(decl, next, &func->overloads, struct hlsl_ir_function_decl, entry)
|
|
|
|
free_function_decl(decl);
|
2021-01-27 08:29:44 -08:00
|
|
|
vkd3d_free((void *)func->name);
|
|
|
|
vkd3d_free(func);
|
|
|
|
}
|
|
|
|
|
2021-02-04 14:33:50 -08:00
|
|
|
static void free_function_rb(struct rb_entry *entry, void *context)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
free_function(RB_ENTRY_VALUE(entry, struct hlsl_ir_function, entry));
|
|
|
|
}
|
|
|
|
|
2022-11-10 17:30:03 -08:00
|
|
|
void hlsl_add_function(struct hlsl_ctx *ctx, char *name, struct hlsl_ir_function_decl *decl)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
struct hlsl_ir_function *func;
|
2023-01-31 17:59:06 -08:00
|
|
|
struct rb_entry *func_entry;
|
2021-01-27 08:29:44 -08:00
|
|
|
|
2023-08-07 15:20:10 -07:00
|
|
|
if (ctx->internal_func_name)
|
|
|
|
{
|
|
|
|
char *internal_name;
|
|
|
|
|
|
|
|
if (!(internal_name = hlsl_strdup(ctx, ctx->internal_func_name)))
|
|
|
|
return;
|
|
|
|
vkd3d_free(name);
|
|
|
|
name = internal_name;
|
|
|
|
}
|
|
|
|
|
2021-05-20 22:32:20 -07:00
|
|
|
func_entry = rb_get(&ctx->functions, name);
|
2021-01-27 08:29:44 -08:00
|
|
|
if (func_entry)
|
|
|
|
{
|
|
|
|
func = RB_ENTRY_VALUE(func_entry, struct hlsl_ir_function, entry);
|
|
|
|
decl->func = func;
|
vkd3d-shader/hlsl: Store function overloads in a list.
The choice to store them in an rbtree was made early on. It does not seem likely
that HLSL programs would define many overloads for any of their functions, but I
suspect the idea was rather that intrinsics would be defined as plain
hlsl_ir_function_decl structures [cf. 447463e5900ca6a636998a65429b8a08a5441657]
and that some intrinsics that could operate on any type would therefore need
many overrides.
This is not how we deal with intrinsics, however. When the first intrinsics were
implemented I made the choice disregard this intended design, and instead match
and convert their types manually, in C. Nothing that has happened in the time
since has led me to question that choice, and in fact, the flexibility with
which we must accommodate functions has led me to believe that matching in this
way was definitely the right choice. The main other designs I see would have
been:
* define each intrinsic variant separately using existing HLSL types. Besides
efficiency concerns (i.e. this would take more space in memory, and would take
longer to generate each variant), the normal type-matching rules don't really
apply to intrinsics.
[For example: elementwise intrinsics like abs() return the same type as the
input, including preserving the distinction between float and float1. It is
legal to define separate HLSL overloads taking float and float1, but trying to
invoke these functions yields an "ambiguous function call" error.]
* introduce new (semi-)generic types. This is far more code and ends up acting
like our current scheme (with helpers) in a slightly more complex form.
So I think we can go ahead and rip out this vestige of the original design for
intrinsics.
As for why to change it: rbtrees are simply more complex to deal with, and it
seems unlikely to me that the difference is going to matter. I do not expect any
program to define large quantities of intrinsics; linked list search should be
good enough.
2023-09-08 14:27:10 -07:00
|
|
|
list_add_tail(&func->overloads, &decl->entry);
|
2021-01-27 08:29:44 -08:00
|
|
|
vkd3d_free(name);
|
|
|
|
return;
|
|
|
|
}
|
2021-05-20 22:32:20 -07:00
|
|
|
func = hlsl_alloc(ctx, sizeof(*func));
|
2021-01-27 08:29:44 -08:00
|
|
|
func->name = name;
|
vkd3d-shader/hlsl: Store function overloads in a list.
The choice to store them in an rbtree was made early on. It does not seem likely
that HLSL programs would define many overloads for any of their functions, but I
suspect the idea was rather that intrinsics would be defined as plain
hlsl_ir_function_decl structures [cf. 447463e5900ca6a636998a65429b8a08a5441657]
and that some intrinsics that could operate on any type would therefore need
many overrides.
This is not how we deal with intrinsics, however. When the first intrinsics were
implemented I made the choice disregard this intended design, and instead match
and convert their types manually, in C. Nothing that has happened in the time
since has led me to question that choice, and in fact, the flexibility with
which we must accommodate functions has led me to believe that matching in this
way was definitely the right choice. The main other designs I see would have
been:
* define each intrinsic variant separately using existing HLSL types. Besides
efficiency concerns (i.e. this would take more space in memory, and would take
longer to generate each variant), the normal type-matching rules don't really
apply to intrinsics.
[For example: elementwise intrinsics like abs() return the same type as the
input, including preserving the distinction between float and float1. It is
legal to define separate HLSL overloads taking float and float1, but trying to
invoke these functions yields an "ambiguous function call" error.]
* introduce new (semi-)generic types. This is far more code and ends up acting
like our current scheme (with helpers) in a slightly more complex form.
So I think we can go ahead and rip out this vestige of the original design for
intrinsics.
As for why to change it: rbtrees are simply more complex to deal with, and it
seems unlikely to me that the difference is going to matter. I do not expect any
program to define large quantities of intrinsics; linked list search should be
good enough.
2023-09-08 14:27:10 -07:00
|
|
|
list_init(&func->overloads);
|
2021-01-27 08:29:44 -08:00
|
|
|
decl->func = func;
|
vkd3d-shader/hlsl: Store function overloads in a list.
The choice to store them in an rbtree was made early on. It does not seem likely
that HLSL programs would define many overloads for any of their functions, but I
suspect the idea was rather that intrinsics would be defined as plain
hlsl_ir_function_decl structures [cf. 447463e5900ca6a636998a65429b8a08a5441657]
and that some intrinsics that could operate on any type would therefore need
many overrides.
This is not how we deal with intrinsics, however. When the first intrinsics were
implemented I made the choice disregard this intended design, and instead match
and convert their types manually, in C. Nothing that has happened in the time
since has led me to question that choice, and in fact, the flexibility with
which we must accommodate functions has led me to believe that matching in this
way was definitely the right choice. The main other designs I see would have
been:
* define each intrinsic variant separately using existing HLSL types. Besides
efficiency concerns (i.e. this would take more space in memory, and would take
longer to generate each variant), the normal type-matching rules don't really
apply to intrinsics.
[For example: elementwise intrinsics like abs() return the same type as the
input, including preserving the distinction between float and float1. It is
legal to define separate HLSL overloads taking float and float1, but trying to
invoke these functions yields an "ambiguous function call" error.]
* introduce new (semi-)generic types. This is far more code and ends up acting
like our current scheme (with helpers) in a slightly more complex form.
So I think we can go ahead and rip out this vestige of the original design for
intrinsics.
As for why to change it: rbtrees are simply more complex to deal with, and it
seems unlikely to me that the difference is going to matter. I do not expect any
program to define large quantities of intrinsics; linked list search should be
good enough.
2023-09-08 14:27:10 -07:00
|
|
|
list_add_tail(&func->overloads, &decl->entry);
|
2021-05-20 22:32:20 -07:00
|
|
|
rb_put(&ctx->functions, func->name, &func->entry);
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
|
|
|
|
2021-08-09 19:56:17 -07:00
|
|
|
unsigned int hlsl_map_swizzle(unsigned int swizzle, unsigned int writemask)
|
|
|
|
{
|
|
|
|
unsigned int i, ret = 0;
|
|
|
|
|
|
|
|
/* Leave replicate swizzles alone; some instructions need them. */
|
|
|
|
if (swizzle == HLSL_SWIZZLE(X, X, X, X)
|
|
|
|
|| swizzle == HLSL_SWIZZLE(Y, Y, Y, Y)
|
|
|
|
|| swizzle == HLSL_SWIZZLE(Z, Z, Z, Z)
|
|
|
|
|| swizzle == HLSL_SWIZZLE(W, W, W, W))
|
|
|
|
return swizzle;
|
|
|
|
|
|
|
|
for (i = 0; i < 4; ++i)
|
|
|
|
{
|
|
|
|
if (writemask & (1 << i))
|
|
|
|
{
|
|
|
|
ret |= (swizzle & 3) << (i * 2);
|
|
|
|
swizzle >>= 2;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* Build a swizzle selecting, in ascending order, the components whose bits
 * are set in "writemask". Unused trailing swizzle slots are filled with X,
 * as the table below shows (e.g. mask 0x5 -> X, Z, X, X). Indexed directly
 * by the low four writemask bits. */
unsigned int hlsl_swizzle_from_writemask(unsigned int writemask)
{
    static const unsigned int swizzles[16] =
    {
        0,
        HLSL_SWIZZLE(X, X, X, X),
        HLSL_SWIZZLE(Y, Y, Y, Y),
        HLSL_SWIZZLE(X, Y, X, X),
        HLSL_SWIZZLE(Z, Z, Z, Z),
        HLSL_SWIZZLE(X, Z, X, X),
        HLSL_SWIZZLE(Y, Z, X, X),
        HLSL_SWIZZLE(X, Y, Z, X),
        HLSL_SWIZZLE(W, W, W, W),
        HLSL_SWIZZLE(X, W, X, X),
        HLSL_SWIZZLE(Y, W, X, X),
        HLSL_SWIZZLE(X, Y, W, X),
        HLSL_SWIZZLE(Z, W, X, X),
        HLSL_SWIZZLE(X, Z, W, X),
        HLSL_SWIZZLE(Y, Z, W, X),
        HLSL_SWIZZLE(X, Y, Z, W),
    };

    /* Mask to four bits so out-of-range input cannot index past the table. */
    return swizzles[writemask & 0xf];
}
|
|
|
|
|
|
|
|
/* Apply "second" as a mask over the components selected by "first": the
 * n-th set bit of "first" survives iff bit n of "second" is set. Only the
 * low four bits of "first" are considered. */
unsigned int hlsl_combine_writemasks(unsigned int first, unsigned int second)
{
    unsigned int combined = 0, bit, second_idx = 0;

    for (bit = 0; bit < 4; ++bit)
    {
        if (!(first & (1u << bit)))
            continue;

        /* Each set bit of "first" consumes one bit of "second", in order. */
        if (second & (1u << second_idx))
            combined |= 1u << bit;
        ++second_idx;
    }

    return combined;
}
|
|
|
|
|
|
|
|
/* Compose two swizzles: the result selects, for each of the first "dim"
 * output components, first[second[i]]. */
unsigned int hlsl_combine_swizzles(unsigned int first, unsigned int second, unsigned int dim)
{
    unsigned int component, combined = 0;

    for (component = 0; component < dim; ++component)
    {
        unsigned int src = hlsl_swizzle_get_component(second, component);

        combined |= hlsl_swizzle_get_component(first, src) << HLSL_SWIZZLE_SHIFT(component);
    }

    return combined;
}
|
|
|
|
|
2023-10-21 12:35:08 -07:00
|
|
|
/* Look up the profile description for a target string such as "ps_2_0" or
 * "vs.3.0" (both underscore and legacy dot-separated spellings are listed).
 * Returns a pointer to a static table entry, or NULL if the target is not
 * recognised. Entries are {name, type, major, minor, major_level,
 * minor_level, software}. */
const struct hlsl_profile_info *hlsl_get_target_info(const char *target)
{
    unsigned int i;

    static const struct hlsl_profile_info profiles[] =
    {
        {"cs_4_0", VKD3D_SHADER_TYPE_COMPUTE, 4, 0, 0, 0, false},
        {"cs_4_1", VKD3D_SHADER_TYPE_COMPUTE, 4, 1, 0, 0, false},
        {"cs_5_0", VKD3D_SHADER_TYPE_COMPUTE, 5, 0, 0, 0, false},
        {"ds_5_0", VKD3D_SHADER_TYPE_DOMAIN, 5, 0, 0, 0, false},
        {"fx_2_0", VKD3D_SHADER_TYPE_EFFECT, 2, 0, 0, 0, false},
        {"fx_4_0", VKD3D_SHADER_TYPE_EFFECT, 4, 0, 0, 0, false},
        {"fx_4_1", VKD3D_SHADER_TYPE_EFFECT, 4, 1, 0, 0, false},
        {"fx_5_0", VKD3D_SHADER_TYPE_EFFECT, 5, 0, 0, 0, false},
        {"gs_4_0", VKD3D_SHADER_TYPE_GEOMETRY, 4, 0, 0, 0, false},
        {"gs_4_1", VKD3D_SHADER_TYPE_GEOMETRY, 4, 1, 0, 0, false},
        {"gs_5_0", VKD3D_SHADER_TYPE_GEOMETRY, 5, 0, 0, 0, false},
        {"hs_5_0", VKD3D_SHADER_TYPE_HULL, 5, 0, 0, 0, false},
        {"ps.1.0", VKD3D_SHADER_TYPE_PIXEL, 1, 0, 0, 0, false},
        {"ps.1.1", VKD3D_SHADER_TYPE_PIXEL, 1, 1, 0, 0, false},
        {"ps.1.2", VKD3D_SHADER_TYPE_PIXEL, 1, 2, 0, 0, false},
        {"ps.1.3", VKD3D_SHADER_TYPE_PIXEL, 1, 3, 0, 0, false},
        {"ps.1.4", VKD3D_SHADER_TYPE_PIXEL, 1, 4, 0, 0, false},
        {"ps.2.0", VKD3D_SHADER_TYPE_PIXEL, 2, 0, 0, 0, false},
        /* "2.a" and "2.b" map to minor versions 1 and 2 respectively. */
        {"ps.2.a", VKD3D_SHADER_TYPE_PIXEL, 2, 1, 0, 0, false},
        {"ps.2.b", VKD3D_SHADER_TYPE_PIXEL, 2, 2, 0, 0, false},
        {"ps.2.sw", VKD3D_SHADER_TYPE_PIXEL, 2, 0, 0, 0, true},
        {"ps.3.0", VKD3D_SHADER_TYPE_PIXEL, 3, 0, 0, 0, false},
        {"ps_1_0", VKD3D_SHADER_TYPE_PIXEL, 1, 0, 0, 0, false},
        {"ps_1_1", VKD3D_SHADER_TYPE_PIXEL, 1, 1, 0, 0, false},
        {"ps_1_2", VKD3D_SHADER_TYPE_PIXEL, 1, 2, 0, 0, false},
        {"ps_1_3", VKD3D_SHADER_TYPE_PIXEL, 1, 3, 0, 0, false},
        {"ps_1_4", VKD3D_SHADER_TYPE_PIXEL, 1, 4, 0, 0, false},
        {"ps_2_0", VKD3D_SHADER_TYPE_PIXEL, 2, 0, 0, 0, false},
        {"ps_2_a", VKD3D_SHADER_TYPE_PIXEL, 2, 1, 0, 0, false},
        {"ps_2_b", VKD3D_SHADER_TYPE_PIXEL, 2, 2, 0, 0, false},
        {"ps_2_sw", VKD3D_SHADER_TYPE_PIXEL, 2, 0, 0, 0, true},
        {"ps_3_0", VKD3D_SHADER_TYPE_PIXEL, 3, 0, 0, 0, false},
        {"ps_3_sw", VKD3D_SHADER_TYPE_PIXEL, 3, 0, 0, 0, true},
        {"ps_4_0", VKD3D_SHADER_TYPE_PIXEL, 4, 0, 0, 0, false},
        {"ps_4_0_level_9_0", VKD3D_SHADER_TYPE_PIXEL, 4, 0, 9, 0, false},
        {"ps_4_0_level_9_1", VKD3D_SHADER_TYPE_PIXEL, 4, 0, 9, 1, false},
        {"ps_4_0_level_9_3", VKD3D_SHADER_TYPE_PIXEL, 4, 0, 9, 3, false},
        {"ps_4_1", VKD3D_SHADER_TYPE_PIXEL, 4, 1, 0, 0, false},
        {"ps_5_0", VKD3D_SHADER_TYPE_PIXEL, 5, 0, 0, 0, false},
        {"tx_1_0", VKD3D_SHADER_TYPE_TEXTURE, 1, 0, 0, 0, false},
        {"vs.1.0", VKD3D_SHADER_TYPE_VERTEX, 1, 0, 0, 0, false},
        {"vs.1.1", VKD3D_SHADER_TYPE_VERTEX, 1, 1, 0, 0, false},
        {"vs.2.0", VKD3D_SHADER_TYPE_VERTEX, 2, 0, 0, 0, false},
        {"vs.2.a", VKD3D_SHADER_TYPE_VERTEX, 2, 1, 0, 0, false},
        {"vs.2.sw", VKD3D_SHADER_TYPE_VERTEX, 2, 0, 0, 0, true},
        {"vs.3.0", VKD3D_SHADER_TYPE_VERTEX, 3, 0, 0, 0, false},
        {"vs.3.sw", VKD3D_SHADER_TYPE_VERTEX, 3, 0, 0, 0, true},
        {"vs_1_0", VKD3D_SHADER_TYPE_VERTEX, 1, 0, 0, 0, false},
        {"vs_1_1", VKD3D_SHADER_TYPE_VERTEX, 1, 1, 0, 0, false},
        {"vs_2_0", VKD3D_SHADER_TYPE_VERTEX, 2, 0, 0, 0, false},
        {"vs_2_a", VKD3D_SHADER_TYPE_VERTEX, 2, 1, 0, 0, false},
        {"vs_2_sw", VKD3D_SHADER_TYPE_VERTEX, 2, 0, 0, 0, true},
        {"vs_3_0", VKD3D_SHADER_TYPE_VERTEX, 3, 0, 0, 0, false},
        {"vs_3_sw", VKD3D_SHADER_TYPE_VERTEX, 3, 0, 0, 0, true},
        {"vs_4_0", VKD3D_SHADER_TYPE_VERTEX, 4, 0, 0, 0, false},
        {"vs_4_0_level_9_0", VKD3D_SHADER_TYPE_VERTEX, 4, 0, 9, 0, false},
        {"vs_4_0_level_9_1", VKD3D_SHADER_TYPE_VERTEX, 4, 0, 9, 1, false},
        {"vs_4_0_level_9_3", VKD3D_SHADER_TYPE_VERTEX, 4, 0, 9, 3, false},
        {"vs_4_1", VKD3D_SHADER_TYPE_VERTEX, 4, 1, 0, 0, false},
        {"vs_5_0", VKD3D_SHADER_TYPE_VERTEX, 5, 0, 0, 0, false},
    };

    /* Linear scan; the table is small and lookups are infrequent. */
    for (i = 0; i < ARRAY_SIZE(profiles); ++i)
    {
        if (!strcmp(target, profiles[i].name))
            return &profiles[i];
    }

    return NULL;
}
|
|
|
|
|
2021-02-04 14:33:50 -08:00
|
|
|
static int compare_function_rb(const void *key, const struct rb_entry *entry)
|
|
|
|
{
|
|
|
|
const char *name = key;
|
|
|
|
const struct hlsl_ir_function *func = RB_ENTRY_VALUE(entry, const struct hlsl_ir_function,entry);
|
|
|
|
|
|
|
|
return strcmp(name, func->name);
|
|
|
|
}
|
|
|
|
|
2021-02-04 14:33:53 -08:00
|
|
|
/* Register all builtin HLSL types in the global scope: scalar, vector and
 * matrix variants of the numeric types, minimum-precision aliases, sampler
 * types, "void", and effect-only types. Also caches the numeric and sampler
 * types in ctx->builtin_types for direct lookup. */
static void declare_predefined_types(struct hlsl_ctx *ctx)
{
    unsigned int x, y, bt, i, v;
    struct hlsl_type *type;

    /* Indexed by base type; must cover 0..HLSL_TYPE_LAST_SCALAR. */
    static const char * const names[] =
    {
        "float",
        "half",
        "double",
        "int",
        "uint",
        "bool",
    };
    /* Scratch buffer for building names like "float4x4"; the longest
     * generated name ("min16float4x4" + NUL) must fit. */
    char name[15];

    /* Minimum-precision aliases, declared per base type below. */
    static const char *const variants_float[] = {"min10float", "min16float"};
    static const char *const variants_int[] = {"min12int", "min16int"};
    static const char *const variants_uint[] = {"min16uint"};

    /* Indexed by enum hlsl_sampler_dim via designated initializers. */
    static const char *const sampler_names[] =
    {
        [HLSL_SAMPLER_DIM_GENERIC] = "sampler",
        [HLSL_SAMPLER_DIM_COMPARISON] = "SamplerComparisonState",
        [HLSL_SAMPLER_DIM_1D] = "sampler1D",
        [HLSL_SAMPLER_DIM_2D] = "sampler2D",
        [HLSL_SAMPLER_DIM_3D] = "sampler3D",
        [HLSL_SAMPLER_DIM_CUBE] = "samplerCUBE",
    };

    /* Additional type names used by effect profiles. */
    static const struct
    {
        char name[13];
        enum hlsl_type_class class;
        enum hlsl_base_type base_type;
        unsigned int dimx, dimy;
    }
    effect_types[] =
    {
        {"dword", HLSL_CLASS_SCALAR, HLSL_TYPE_UINT, 1, 1},
        {"float", HLSL_CLASS_SCALAR, HLSL_TYPE_FLOAT, 1, 1},
        {"vector", HLSL_CLASS_VECTOR, HLSL_TYPE_FLOAT, 4, 1},
        {"matrix", HLSL_CLASS_MATRIX, HLSL_TYPE_FLOAT, 4, 4},
        {"STRING", HLSL_CLASS_OBJECT, HLSL_TYPE_STRING, 1, 1},
        {"TEXTURE", HLSL_CLASS_OBJECT, HLSL_TYPE_TEXTURE, 1, 1},
        {"PIXELSHADER", HLSL_CLASS_OBJECT, HLSL_TYPE_PIXELSHADER, 1, 1},
        {"VERTEXSHADER", HLSL_CLASS_OBJECT, HLSL_TYPE_VERTEXSHADER, 1, 1},
    };

    /* Declare every "<name><y>x<x>" matrix type, and for the first row also
     * the "<name><x>" vector and plain "<name>" scalar types. */
    for (bt = 0; bt <= HLSL_TYPE_LAST_SCALAR; ++bt)
    {
        for (y = 1; y <= 4; ++y)
        {
            for (x = 1; x <= 4; ++x)
            {
                sprintf(name, "%s%ux%u", names[bt], y, x);
                type = hlsl_new_type(ctx, name, HLSL_CLASS_MATRIX, bt, x, y);
                hlsl_scope_add_type(ctx->globals, type);
                ctx->builtin_types.matrix[bt][x - 1][y - 1] = type;

                if (y == 1)
                {
                    sprintf(name, "%s%u", names[bt], x);
                    type = hlsl_new_type(ctx, name, HLSL_CLASS_VECTOR, bt, x, y);
                    hlsl_scope_add_type(ctx->globals, type);
                    ctx->builtin_types.vector[bt][x - 1] = type;

                    if (x == 1)
                    {
                        sprintf(name, "%s", names[bt]);
                        type = hlsl_new_type(ctx, name, HLSL_CLASS_SCALAR, bt, x, y);
                        hlsl_scope_add_type(ctx->globals, type);
                        ctx->builtin_types.scalar[bt] = type;
                    }
                }
            }
        }
    }

    /* Declare minimum-precision variants (min16float etc.) for the base
     * types that have them. These are distinct named types flagged
     * is_minimum_precision, but are not cached in ctx->builtin_types. */
    for (bt = 0; bt <= HLSL_TYPE_LAST_SCALAR; ++bt)
    {
        const char *const *variants;
        unsigned int n_variants;

        switch (bt)
        {
            case HLSL_TYPE_FLOAT:
                variants = variants_float;
                n_variants = ARRAY_SIZE(variants_float);
                break;

            case HLSL_TYPE_INT:
                variants = variants_int;
                n_variants = ARRAY_SIZE(variants_int);
                break;

            case HLSL_TYPE_UINT:
                variants = variants_uint;
                n_variants = ARRAY_SIZE(variants_uint);
                break;

            default:
                /* No minimum-precision variants for this base type. */
                n_variants = 0;
                variants = NULL;
                break;
        }

        for (v = 0; v < n_variants; ++v)
        {
            for (y = 1; y <= 4; ++y)
            {
                for (x = 1; x <= 4; ++x)
                {
                    sprintf(name, "%s%ux%u", variants[v], y, x);
                    type = hlsl_new_type(ctx, name, HLSL_CLASS_MATRIX, bt, x, y);
                    type->is_minimum_precision = 1;
                    hlsl_scope_add_type(ctx->globals, type);

                    if (y == 1)
                    {
                        sprintf(name, "%s%u", variants[v], x);
                        type = hlsl_new_type(ctx, name, HLSL_CLASS_VECTOR, bt, x, y);
                        type->is_minimum_precision = 1;
                        hlsl_scope_add_type(ctx->globals, type);

                        if (x == 1)
                        {
                            sprintf(name, "%s", variants[v]);
                            type = hlsl_new_type(ctx, name, HLSL_CLASS_SCALAR, bt, x, y);
                            type->is_minimum_precision = 1;
                            hlsl_scope_add_type(ctx->globals, type);
                        }
                    }
                }
            }
        }
    }

    /* Sampler types; cached but note they are not added to the global scope
     * here, unlike the numeric types above. */
    for (bt = 0; bt <= HLSL_SAMPLER_DIM_LAST_SAMPLER; ++bt)
    {
        type = hlsl_new_type(ctx, sampler_names[bt], HLSL_CLASS_OBJECT, HLSL_TYPE_SAMPLER, 1, 1);
        type->sampler_dim = bt;
        ctx->builtin_types.sampler[bt] = type;
    }

    ctx->builtin_types.Void = hlsl_new_type(ctx, "void", HLSL_CLASS_OBJECT, HLSL_TYPE_VOID, 1, 1);

    for (i = 0; i < ARRAY_SIZE(effect_types); ++i)
    {
        type = hlsl_new_type(ctx, effect_types[i].name, effect_types[i].class,
                effect_types[i].base_type, effect_types[i].dimx, effect_types[i].dimy);
        hlsl_scope_add_type(ctx->globals, type);
    }
}
|
|
|
|
|
2023-07-02 16:10:10 -07:00
|
|
|
/* Initialize a compilation context: source file bookkeeping, scopes, builtin
 * types, the function tree, constant buffers, and compile options. Returns
 * false on allocation failure; early-failure paths free what was allocated
 * so far, while later failures rely on hlsl_ctx_cleanup(). */
static bool hlsl_ctx_init(struct hlsl_ctx *ctx, const struct vkd3d_shader_compile_info *compile_info,
        const struct hlsl_profile_info *profile, struct vkd3d_shader_message_context *message_context)
{
    unsigned int i;

    memset(ctx, 0, sizeof(*ctx));

    ctx->profile = profile;

    ctx->message_context = message_context;

    /* Source file 0 is the main input; locations are 1-based. */
    if (!(ctx->source_files = hlsl_alloc(ctx, sizeof(*ctx->source_files))))
        return false;
    if (!(ctx->source_files[0] = hlsl_strdup(ctx, compile_info->source_name ? compile_info->source_name : "<anonymous>")))
    {
        vkd3d_free(ctx->source_files);
        return false;
    }
    ctx->source_files_count = 1;
    ctx->location.source_name = ctx->source_files[0];
    ctx->location.line = ctx->location.column = 1;
    vkd3d_string_buffer_cache_init(&ctx->string_buffers);

    list_init(&ctx->scopes);

    if (!(ctx->dummy_scope = hlsl_new_scope(ctx, NULL)))
    {
        vkd3d_free((void *)ctx->source_files[0]);
        vkd3d_free(ctx->source_files);
        return false;
    }
    /* The first pushed scope becomes the global scope. */
    hlsl_push_scope(ctx);
    ctx->globals = ctx->cur_scope;

    list_init(&ctx->types);
    declare_predefined_types(ctx);

    rb_init(&ctx->functions, compare_function_rb);

    hlsl_block_init(&ctx->static_initializers);
    list_init(&ctx->extern_vars);

    list_init(&ctx->buffers);

    /* Default constant buffers; globals not placed elsewhere end up in
     * $Globals, uniform parameters in $Params. */
    if (!(ctx->globals_buffer = hlsl_new_buffer(ctx, HLSL_BUFFER_CONSTANT,
            hlsl_strdup(ctx, "$Globals"), NULL, &ctx->location)))
        return false;
    if (!(ctx->params_buffer = hlsl_new_buffer(ctx, HLSL_BUFFER_CONSTANT,
            hlsl_strdup(ctx, "$Params"), NULL, &ctx->location)))
        return false;
    ctx->cur_buffer = ctx->globals_buffer;

    /* Apply the compile options this frontend understands; others are
     * silently ignored here. */
    for (i = 0; i < compile_info->option_count; ++i)
    {
        const struct vkd3d_shader_compile_option *option = &compile_info->options[i];

        if (option->name == VKD3D_SHADER_COMPILE_OPTION_PACK_MATRIX_ORDER)
        {
            if (option->value == VKD3D_SHADER_COMPILE_OPTION_PACK_MATRIX_ROW_MAJOR)
                ctx->matrix_majority = HLSL_MODIFIER_ROW_MAJOR;
            else if (option->value == VKD3D_SHADER_COMPILE_OPTION_PACK_MATRIX_COLUMN_MAJOR)
                ctx->matrix_majority = HLSL_MODIFIER_COLUMN_MAJOR;
        }
        else if (option->name == VKD3D_SHADER_COMPILE_OPTION_BACKWARD_COMPATIBILITY)
        {
            ctx->semantic_compat_mapping = option->value & VKD3D_SHADER_COMPILE_OPTION_BACKCOMPAT_MAP_SEMANTIC_NAMES;
        }
    }

    return true;
}
|
|
|
|
|
2021-02-04 14:33:53 -08:00
|
|
|
/* Release every resource owned by the compilation context.
 *
 * The teardown order is deliberate: variables (which reference types) are
 * freed while walking the scopes, and only afterwards are the types
 * themselves destroyed. Do not reorder these stages. */
static void hlsl_ctx_cleanup(struct hlsl_ctx *ctx)
{
    struct hlsl_buffer *buffer, *next_buffer;
    struct hlsl_scope *scope, *next_scope;
    struct hlsl_ir_var *var, *next_var;
    struct hlsl_type *type, *next_type;
    unsigned int i;

    /* Free the instructions recorded for global initializers. */
    hlsl_block_cleanup(&ctx->static_initializers);

    /* Source file names are duplicated when registered; free each copy,
     * then the array itself. */
    for (i = 0; i < ctx->source_files_count; ++i)
        vkd3d_free((void *)ctx->source_files[i]);
    vkd3d_free(ctx->source_files);
    vkd3d_string_buffer_cache_cleanup(&ctx->string_buffers);

    /* Destroy all function declarations (the rbtree owns them via
     * free_function_rb). */
    rb_destroy(&ctx->functions, free_function_rb, NULL);

    /* Free every variable in every scope before freeing types below, since
     * variables hold references to types. Each scope also owns a per-scope
     * type table (rbtree of non-owning pointers, hence NULL destructor). */
    LIST_FOR_EACH_ENTRY_SAFE(scope, next_scope, &ctx->scopes, struct hlsl_scope, entry)
    {
        LIST_FOR_EACH_ENTRY_SAFE(var, next_var, &scope->vars, struct hlsl_ir_var, scope_entry)
            hlsl_free_var(var);
        rb_destroy(&scope->types, NULL, NULL);
        vkd3d_free(scope);
    }

    /* Now that nothing references them any more, free the types. */
    LIST_FOR_EACH_ENTRY_SAFE(type, next_type, &ctx->types, struct hlsl_type, entry)
        hlsl_free_type(type);

    /* Constant buffers own their (strdup'd) names. */
    LIST_FOR_EACH_ENTRY_SAFE(buffer, next_buffer, &ctx->buffers, struct hlsl_buffer, entry)
    {
        vkd3d_free((void *)buffer->name);
        vkd3d_free(buffer);
    }

    vkd3d_free(ctx->constant_defs.regs);
}
|
|
|
|
|
2021-03-04 15:33:28 -08:00
|
|
|
int hlsl_compile_shader(const struct vkd3d_shader_code *hlsl, const struct vkd3d_shader_compile_info *compile_info,
|
2022-02-28 03:23:43 -08:00
|
|
|
struct vkd3d_shader_code *out, struct vkd3d_shader_message_context *message_context)
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
const struct vkd3d_shader_hlsl_source_info *hlsl_source_info;
|
2021-09-11 16:18:50 -07:00
|
|
|
struct hlsl_ir_function_decl *decl, *entry_func = NULL;
|
2021-01-27 08:29:44 -08:00
|
|
|
const struct hlsl_profile_info *profile;
|
2021-09-11 16:18:50 -07:00
|
|
|
struct hlsl_ir_function *func;
|
2021-03-04 15:33:24 -08:00
|
|
|
const char *entry_point;
|
2021-02-04 14:33:53 -08:00
|
|
|
struct hlsl_ctx ctx;
|
2021-02-04 14:33:50 -08:00
|
|
|
int ret;
|
2021-01-27 08:29:44 -08:00
|
|
|
|
|
|
|
if (!(hlsl_source_info = vkd3d_find_struct(compile_info->next, HLSL_SOURCE_INFO)))
|
|
|
|
{
|
|
|
|
ERR("No HLSL source info given.\n");
|
|
|
|
return VKD3D_ERROR_INVALID_ARGUMENT;
|
|
|
|
}
|
2021-03-04 15:33:24 -08:00
|
|
|
entry_point = hlsl_source_info->entry_point ? hlsl_source_info->entry_point : "main";
|
2021-01-27 08:29:44 -08:00
|
|
|
|
2023-10-21 12:35:08 -07:00
|
|
|
if (!(profile = hlsl_get_target_info(hlsl_source_info->profile)))
|
2021-01-27 08:29:44 -08:00
|
|
|
{
|
|
|
|
FIXME("Unknown compilation target %s.\n", debugstr_a(hlsl_source_info->profile));
|
|
|
|
return VKD3D_ERROR_NOT_IMPLEMENTED;
|
|
|
|
}
|
|
|
|
|
2022-02-28 03:23:43 -08:00
|
|
|
if (compile_info->target_type == VKD3D_SHADER_TARGET_D3D_BYTECODE && profile->major_version > 3)
|
|
|
|
{
|
|
|
|
vkd3d_shader_error(message_context, NULL, VKD3D_SHADER_ERROR_HLSL_INCOMPATIBLE_PROFILE,
|
|
|
|
"The '%s' target profile is incompatible with the 'd3dbc' target type.", profile->name);
|
|
|
|
return VKD3D_ERROR_INVALID_ARGUMENT;
|
|
|
|
}
|
|
|
|
else if (compile_info->target_type == VKD3D_SHADER_TARGET_DXBC_TPF && profile->major_version < 4)
|
|
|
|
{
|
|
|
|
vkd3d_shader_error(message_context, NULL, VKD3D_SHADER_ERROR_HLSL_INCOMPATIBLE_PROFILE,
|
|
|
|
"The '%s' target profile is incompatible with the 'dxbc-tpf' target type.", profile->name);
|
|
|
|
return VKD3D_ERROR_INVALID_ARGUMENT;
|
|
|
|
}
|
|
|
|
|
2023-07-02 16:10:10 -07:00
|
|
|
if (!hlsl_ctx_init(&ctx, compile_info, profile, message_context))
|
2021-02-04 14:33:50 -08:00
|
|
|
return VKD3D_ERROR_OUT_OF_MEMORY;
|
|
|
|
|
2021-08-29 23:49:25 -07:00
|
|
|
if ((ret = hlsl_lexer_compile(&ctx, hlsl)) == 2)
|
2021-03-04 15:33:25 -08:00
|
|
|
{
|
|
|
|
hlsl_ctx_cleanup(&ctx);
|
|
|
|
return VKD3D_ERROR_OUT_OF_MEMORY;
|
|
|
|
}
|
2021-02-04 14:33:50 -08:00
|
|
|
|
2021-05-20 22:32:23 -07:00
|
|
|
if (ctx.result)
|
2021-03-04 15:33:24 -08:00
|
|
|
{
|
|
|
|
hlsl_ctx_cleanup(&ctx);
|
2021-05-20 22:32:23 -07:00
|
|
|
return ctx.result;
|
2021-03-04 15:33:24 -08:00
|
|
|
}
|
|
|
|
|
2021-08-29 23:49:25 -07:00
|
|
|
/* If parsing failed without an error condition being recorded, we
|
|
|
|
* plausibly hit some unimplemented feature. */
|
|
|
|
if (ret)
|
|
|
|
{
|
|
|
|
hlsl_ctx_cleanup(&ctx);
|
|
|
|
return VKD3D_ERROR_NOT_IMPLEMENTED;
|
|
|
|
}
|
|
|
|
|
2021-09-11 16:18:50 -07:00
|
|
|
if ((func = hlsl_get_function(&ctx, entry_point)))
|
|
|
|
{
|
vkd3d-shader/hlsl: Store function overloads in a list.
The choice to store them in an rbtree was made early on. It does not seem likely
that HLSL programs would define many overloads for any of their functions, but I
suspect the idea was rather that intrinsics would be defined as plain
hlsl_ir_function_decl structures [cf. 447463e5900ca6a636998a65429b8a08a5441657]
and that some intrinsics that could operate on any type would therefore need
many overrides.
This is not how we deal with intrinsics, however. When the first intrinsics were
implemented I made the choice disregard this intended design, and instead match
and convert their types manually, in C. Nothing that has happened in the time
since has led me to question that choice, and in fact, the flexibility with
which we must accommodate functions has led me to believe that matching in this
way was definitely the right choice. The main other designs I see would have
been:
* define each intrinsic variant separately using existing HLSL types. Besides
efficiency concerns (i.e. this would take more space in memory, and would take
longer to generate each variant), the normal type-matching rules don't really
apply to intrinsics.
[For example: elementwise intrinsics like abs() return the same type as the
input, including preserving the distinction between float and float1. It is
legal to define separate HLSL overloads taking float and float1, but trying to
invoke these functions yields an "ambiguous function call" error.]
* introduce new (semi-)generic types. This is far more code and ends up acting
like our current scheme (with helpers) in a slightly more complex form.
So I think we can go ahead and rip out this vestige of the original design for
intrinsics.
As for why to change it: rbtrees are simply more complex to deal with, and it
seems unlikely to me that the difference is going to matter. I do not expect any
program to define large quantities of intrinsics; linked list search should be
good enough.
2023-09-08 14:27:10 -07:00
|
|
|
LIST_FOR_EACH_ENTRY(decl, &func->overloads, struct hlsl_ir_function_decl, entry)
|
2021-09-11 16:18:50 -07:00
|
|
|
{
|
|
|
|
if (!decl->has_body)
|
|
|
|
continue;
|
2023-02-09 12:36:58 -08:00
|
|
|
if (entry_func)
|
|
|
|
{
|
|
|
|
/* Depending on d3dcompiler version, either the first or last is
|
|
|
|
* selected. */
|
|
|
|
hlsl_fixme(&ctx, &decl->loc, "Multiple valid entry point definitions.");
|
|
|
|
}
|
2021-09-11 16:18:50 -07:00
|
|
|
entry_func = decl;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!entry_func)
|
2021-03-04 15:33:24 -08:00
|
|
|
{
|
|
|
|
const struct vkd3d_shader_location loc = {.source_name = compile_info->source_name};
|
2021-02-04 14:33:50 -08:00
|
|
|
|
2021-12-01 08:14:57 -08:00
|
|
|
hlsl_error(&ctx, &loc, VKD3D_SHADER_ERROR_HLSL_NOT_DEFINED,
|
2021-03-04 15:33:24 -08:00
|
|
|
"Entry point \"%s\" is not defined.", entry_point);
|
2021-08-29 23:53:35 -07:00
|
|
|
hlsl_ctx_cleanup(&ctx);
|
2021-03-04 15:33:24 -08:00
|
|
|
return VKD3D_ERROR_INVALID_SHADER;
|
|
|
|
}
|
|
|
|
|
2022-02-28 03:23:43 -08:00
|
|
|
ret = hlsl_emit_bytecode(&ctx, entry_func, compile_info->target_type, out);
|
2021-03-04 15:33:24 -08:00
|
|
|
|
|
|
|
hlsl_ctx_cleanup(&ctx);
|
2021-02-04 14:33:50 -08:00
|
|
|
return ret;
|
2021-01-27 08:29:44 -08:00
|
|
|
}
|
2023-08-07 15:20:10 -07:00
|
|
|
|
|
|
|
struct hlsl_ir_function_decl *hlsl_compile_internal_function(struct hlsl_ctx *ctx, const char *name, const char *hlsl)
|
|
|
|
{
|
|
|
|
const struct hlsl_ir_function_decl *saved_cur_function = ctx->cur_function;
|
|
|
|
struct vkd3d_shader_code code = {.code = hlsl, .size = strlen(hlsl)};
|
|
|
|
const char *saved_internal_func_name = ctx->internal_func_name;
|
|
|
|
struct vkd3d_string_buffer *internal_name;
|
|
|
|
struct hlsl_ir_function_decl *func;
|
|
|
|
void *saved_scanner = ctx->scanner;
|
|
|
|
int ret;
|
|
|
|
|
|
|
|
TRACE("name %s, hlsl %s.\n", debugstr_a(name), debugstr_a(hlsl));
|
|
|
|
|
|
|
|
/* The actual name of the function is mangled with a unique prefix, both to
|
|
|
|
* allow defining multiple variants of a function with the same name, and to
|
|
|
|
* avoid polluting the user name space. */
|
|
|
|
|
|
|
|
if (!(internal_name = hlsl_get_string_buffer(ctx)))
|
|
|
|
return NULL;
|
|
|
|
vkd3d_string_buffer_printf(internal_name, "<%s-%u>", name, ctx->internal_name_counter++);
|
|
|
|
|
|
|
|
/* Save and restore everything that matters.
|
|
|
|
* Note that saving the scope stack is hard, and shouldn't be necessary. */
|
|
|
|
|
|
|
|
ctx->scanner = NULL;
|
|
|
|
ctx->internal_func_name = internal_name->buffer;
|
|
|
|
ctx->cur_function = NULL;
|
|
|
|
ret = hlsl_lexer_compile(ctx, &code);
|
|
|
|
ctx->scanner = saved_scanner;
|
|
|
|
ctx->internal_func_name = saved_internal_func_name;
|
|
|
|
ctx->cur_function = saved_cur_function;
|
|
|
|
if (ret)
|
|
|
|
{
|
|
|
|
ERR("Failed to compile intrinsic, error %u.\n", ret);
|
|
|
|
hlsl_release_string_buffer(ctx, internal_name);
|
|
|
|
return NULL;
|
|
|
|
}
|
2023-09-08 14:21:47 -07:00
|
|
|
func = hlsl_get_first_func_decl(ctx, internal_name->buffer);
|
2023-08-07 15:20:10 -07:00
|
|
|
hlsl_release_string_buffer(ctx, internal_name);
|
|
|
|
return func;
|
|
|
|
}
|