/*
 * Copyright 2012 Matteo Bruni for CodeWeavers
 * Copyright 2019-2020 Zebediah Figura for CodeWeavers
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
 */

#ifndef __VKD3D_SHADER_HLSL_H
#define __VKD3D_SHADER_HLSL_H

#include "vkd3d_shader_private.h"
#include "rbtree.h"
#include "vkd3d_d3dcommon.h"
#include "vkd3d_d3dx9shader.h"

/* The general IR structure is inspired by Mesa GLSL hir, even though the code
 * ends up being quite different in practice. Anyway, here comes the relevant
 * licensing information.
 *
 * Copyright © 2010 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#define HLSL_SWIZZLE_X (0u)
#define HLSL_SWIZZLE_Y (1u)
#define HLSL_SWIZZLE_Z (2u)
#define HLSL_SWIZZLE_W (3u)

#define HLSL_SWIZZLE(x, y, z, w) \
        (((HLSL_SWIZZLE_ ## x) << 0) \
        | ((HLSL_SWIZZLE_ ## y) << 2) \
        | ((HLSL_SWIZZLE_ ## z) << 4) \
        | ((HLSL_SWIZZLE_ ## w) << 6))

#define HLSL_SWIZZLE_MASK (0x3u)
#define HLSL_SWIZZLE_SHIFT(idx) (2u * (idx))

static inline unsigned int hlsl_swizzle_get_component(uint32_t swizzle, unsigned int idx)
{
    return (swizzle >> HLSL_SWIZZLE_SHIFT(idx)) & HLSL_SWIZZLE_MASK;
}
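
/* For illustration, a couple of values produced by the macros above:
 *
 *     HLSL_SWIZZLE(X, Y, Z, W) == 0xe4   (the identity swizzle)
 *     HLSL_SWIZZLE(W, Z, Y, X) == 0x1b
 *
 * and hlsl_swizzle_get_component(HLSL_SWIZZLE(W, Z, Y, X), 0) returns
 * HLSL_SWIZZLE_W, i.e. the source component read for destination index 0. */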

enum hlsl_type_class
{
    HLSL_CLASS_SCALAR,
    HLSL_CLASS_VECTOR,
    HLSL_CLASS_MATRIX,
    HLSL_CLASS_LAST_NUMERIC = HLSL_CLASS_MATRIX,
    HLSL_CLASS_STRUCT,
    HLSL_CLASS_ARRAY,
    HLSL_CLASS_DEPTH_STENCIL_STATE,
    HLSL_CLASS_DEPTH_STENCIL_VIEW,
    HLSL_CLASS_EFFECT_GROUP,
    HLSL_CLASS_PASS,
    HLSL_CLASS_PIXEL_SHADER,
    HLSL_CLASS_RASTERIZER_STATE,
    HLSL_CLASS_RENDER_TARGET_VIEW,
    HLSL_CLASS_SAMPLER,
    HLSL_CLASS_STRING,
    HLSL_CLASS_TECHNIQUE,
    HLSL_CLASS_TEXTURE,
    HLSL_CLASS_UAV,
    HLSL_CLASS_VERTEX_SHADER,
    HLSL_CLASS_COMPUTE_SHADER,
    HLSL_CLASS_DOMAIN_SHADER,
    HLSL_CLASS_HULL_SHADER,
    HLSL_CLASS_GEOMETRY_SHADER,
    HLSL_CLASS_CONSTANT_BUFFER,
    HLSL_CLASS_BLEND_STATE,
    HLSL_CLASS_VOID,
    HLSL_CLASS_NULL,
};

enum hlsl_base_type
{
    HLSL_TYPE_FLOAT,
    HLSL_TYPE_HALF,
    HLSL_TYPE_DOUBLE,
    HLSL_TYPE_INT,
    HLSL_TYPE_UINT,
    HLSL_TYPE_BOOL,
    HLSL_TYPE_LAST_SCALAR = HLSL_TYPE_BOOL,
};

enum hlsl_sampler_dim
{
    HLSL_SAMPLER_DIM_GENERIC = 0,
    HLSL_SAMPLER_DIM_COMPARISON,
    HLSL_SAMPLER_DIM_1D,
    HLSL_SAMPLER_DIM_2D,
    HLSL_SAMPLER_DIM_3D,
    HLSL_SAMPLER_DIM_CUBE,
    HLSL_SAMPLER_DIM_LAST_SAMPLER = HLSL_SAMPLER_DIM_CUBE,
    HLSL_SAMPLER_DIM_1DARRAY,
    HLSL_SAMPLER_DIM_2DARRAY,
    HLSL_SAMPLER_DIM_2DMS,
    HLSL_SAMPLER_DIM_2DMSARRAY,
    HLSL_SAMPLER_DIM_CUBEARRAY,
    HLSL_SAMPLER_DIM_BUFFER,
    HLSL_SAMPLER_DIM_STRUCTURED_BUFFER,
    HLSL_SAMPLER_DIM_MAX = HLSL_SAMPLER_DIM_STRUCTURED_BUFFER,
    /* NOTE: Remember to update object_methods[] in hlsl.y if this enum is modified. */
};

enum hlsl_regset
{
    HLSL_REGSET_SAMPLERS,
    HLSL_REGSET_TEXTURES,
    HLSL_REGSET_UAVS,
    HLSL_REGSET_LAST_OBJECT = HLSL_REGSET_UAVS,
    HLSL_REGSET_NUMERIC,
    HLSL_REGSET_LAST = HLSL_REGSET_NUMERIC,
};

/* An HLSL source-level data type, including anonymous structs and typedefs. */
struct hlsl_type
{
    /* Item entry in hlsl_ctx->types. */
    struct list entry;
    /* Item entry in hlsl_scope->types. hlsl_type->name is used as key (if not NULL). */
    struct rb_entry scope_entry;

    enum hlsl_type_class class;

    /* If class is HLSL_CLASS_SAMPLER, then sampler_dim is <= HLSL_SAMPLER_DIM_LAST_SAMPLER.
     * If class is HLSL_CLASS_TEXTURE, then sampler_dim can be any value of the enum except
     * HLSL_SAMPLER_DIM_GENERIC and HLSL_SAMPLER_DIM_COMPARISON.
     * If class is HLSL_CLASS_UAV, then sampler_dim must be one of HLSL_SAMPLER_DIM_1D,
     * HLSL_SAMPLER_DIM_2D, HLSL_SAMPLER_DIM_3D, HLSL_SAMPLER_DIM_1DARRAY, HLSL_SAMPLER_DIM_2DARRAY,
     * HLSL_SAMPLER_DIM_BUFFER, or HLSL_SAMPLER_DIM_STRUCTURED_BUFFER.
     * Otherwise, sampler_dim is not used. */
    enum hlsl_sampler_dim sampler_dim;
    /* Name, in case the type is a named struct or a typedef. */
    const char *name;
    /* Bitfield for storing type modifiers, subset of HLSL_TYPE_MODIFIERS_MASK.
     * Modifiers that don't fall inside this mask are to be stored in the variable in
     * hlsl_ir_var.modifiers, or in the struct field in hlsl_ir_field.modifiers. */
    uint32_t modifiers;
    /* Size of the type values on each dimension. For non-numeric types, they are set for the
     * convenience of the sm1/sm4 backends.
     * If type is HLSL_CLASS_SCALAR, then both dimx = 1 and dimy = 1.
     * If type is HLSL_CLASS_VECTOR, then dimx is the size of the vector, and dimy = 1.
     * If type is HLSL_CLASS_MATRIX, then dimx is the number of columns, and dimy the number of rows.
     * If type is HLSL_CLASS_ARRAY, then dimx and dimy have the same value as in the type of the array elements.
     * If type is HLSL_CLASS_STRUCT, then dimx is the sum of (dimx * dimy) of every component, and dimy = 1.
     */
    unsigned int dimx;
    unsigned int dimy;
    /* Sample count for HLSL_SAMPLER_DIM_2DMS or HLSL_SAMPLER_DIM_2DMSARRAY. */
    unsigned int sample_count;

    union
    {
        /* Additional information if type is numeric. */
        struct
        {
            enum hlsl_base_type type;
        } numeric;
        /* Additional information if type is HLSL_CLASS_STRUCT. */
        struct
        {
            struct hlsl_struct_field *fields;
            size_t field_count;
        } record;
        /* Additional information if type is HLSL_CLASS_ARRAY. */
        struct
        {
            struct hlsl_type *type;
            /* Array length, or HLSL_ARRAY_ELEMENTS_COUNT_IMPLICIT if it is not known yet at parse time. */
            unsigned int elements_count;
        } array;
        /* Additional information if the class is HLSL_CLASS_TEXTURE or
         * HLSL_CLASS_UAV. */
        struct
        {
            /* Format of the data contained within the type. */
            struct hlsl_type *format;
            /* The type is a rasteriser-ordered view. */
            bool rasteriser_ordered;
        } resource;
        /* Additional field to distinguish object types. Currently used only for technique types. */
        unsigned int version;
    } e;

    /* Number of numeric register components used by one value of this type, for each regset.
     * For HLSL_REGSET_NUMERIC, 4 components make 1 register, while for other regsets 1 component makes
     * 1 register.
     * If type is HLSL_CLASS_STRUCT or HLSL_CLASS_ARRAY, the reg_size of their elements and padding
     * (which varies according to the backend) is also included. */
    unsigned int reg_size[HLSL_REGSET_LAST + 1];
    /* Offset where the type's description starts in the output bytecode, in bytes. */
    size_t bytecode_offset;

    uint32_t is_minimum_precision : 1;
};
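
/* For instance, for the HLSL declaration "float3x4 m;" the resulting type has
 * class HLSL_CLASS_MATRIX with e.numeric.type == HLSL_TYPE_FLOAT, dimy == 3
 * (rows) and dimx == 4 (columns); a declaration "float3x4 m[8];" wraps that
 * type in an HLSL_CLASS_ARRAY type with e.array.elements_count == 8 and the
 * same dimx/dimy as its element type. (Illustrative example, derived from the
 * field descriptions above.) */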

/* In HLSL, a semantic is a string linked to a variable (or a field) so that it can be matched
 * across different shader stages in the graphics pipeline. */
struct hlsl_semantic
{
    const char *name;
    uint32_t index;

    /* Name exactly as it appears in the sources. */
    const char *raw_name;
    /* If the variable or field that stores this hlsl_semantic has already reported that it is missing. */
    bool reported_missing;
    /* In case the variable or field that stores this semantic has already reported to use a
     * duplicated output semantic, this value stores the last reported index + 1. Otherwise it is 0. */
    uint32_t reported_duplicated_output_next_index;
    /* In case the variable or field that stores this semantic has already reported to use a
     * duplicated input semantic with incompatible values, this value stores the last reported
     * index + 1. Otherwise it is 0. */
    uint32_t reported_duplicated_input_incompatible_next_index;
};

/* A field within a struct type declaration, used in hlsl_type.e.record.fields. */
struct hlsl_struct_field
{
    struct vkd3d_shader_location loc;
    struct hlsl_type *type;
    const char *name;
    struct hlsl_semantic semantic;

    /* Bitfield for storing modifiers that are not in HLSL_TYPE_MODIFIERS_MASK (these are stored in
     * type->modifiers instead) and that also are specific to the field and not the whole variable.
     * In particular, interpolation modifiers. */
    uint32_t storage_modifiers;
    /* Offset of the field within the type it belongs to, in register components, for each regset. */
    unsigned int reg_offset[HLSL_REGSET_LAST + 1];

    /* Offset where the field name starts in the output bytecode, in bytes. */
    size_t name_bytecode_offset;
};

/* Information about the register(s) allocated for an instruction node or variable.
 * These values are initialized at the end of hlsl_emit_bytecode(), after the compilation passes,
 * just before writing the bytecode.
 * The type of register (register class) is implied from its use, so it is not stored in this
 * struct. */
struct hlsl_reg
{
    /* Register number of the first register allocated. */
    uint32_t id;
    /* For descriptors (buffer, texture, sampler, UAV) this is the base binding
     * index of the descriptor.
     * For 5.1 and above descriptors have space and may be arrayed, in which
     * case the array shares a single register ID but has a range of register
     * indices, and "id" and "index" are as a rule not equal.
     * For versions below 5.1, the register number for descriptors is the same
     * as its external binding index, so only "index" is used, and "id" is
     * ignored.
     * For numeric registers "index" is not used. */
    uint32_t index;
    /* Register space of a descriptor. Not used for numeric registers. */
    uint32_t space;
    /* Number of registers to be allocated.
     * Unlike the variable's type's reg_size, it is not expressed in register components, but rather
     * in whole registers, and may depend on which components are used within the shader. */
    uint32_t allocation_size;
    /* For numeric registers, a writemask can be provided to indicate the reservation of only some
     * of the 4 components. */
    unsigned int writemask;
    /* Whether the register has been allocated. */
    bool allocated;
};
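
/* An illustrative example (values assumed, not taken from actual compiler output):
 * for a 5.1+ declaration such as "Texture2D tex[4] : register(t3, space1);", the
 * variable's HLSL_REGSET_TEXTURES register would have index == 3, space == 1 and
 * allocation_size == 4, while "id" is whatever ID the allocator assigns to the
 * whole range; for targets below 5.1 only "index" would matter. */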

/* Types of instruction nodes for the IR.
 * Each type of instruction node is associated with a struct with the same name in lower case,
 * e.g. for HLSL_IR_CONSTANT there exists struct hlsl_ir_constant.
 * Each one of these structs starts with a struct hlsl_ir_node field, so pointers to values of these
 * types can be cast seamlessly to (struct hlsl_ir_node *) and vice versa. */
enum hlsl_ir_node_type
{
    HLSL_IR_CALL,
    HLSL_IR_CONSTANT,
    HLSL_IR_EXPR,
    HLSL_IR_IF,
    HLSL_IR_INDEX,
    HLSL_IR_LOAD,
    HLSL_IR_LOOP,
    HLSL_IR_JUMP,
    HLSL_IR_RESOURCE_LOAD,
    HLSL_IR_RESOURCE_STORE,
    HLSL_IR_STRING_CONSTANT,
    HLSL_IR_STORE,
    HLSL_IR_SWIZZLE,
    HLSL_IR_SWITCH,
    HLSL_IR_STATEBLOCK_CONSTANT,
};

/* Common data for every type of IR instruction node. */
struct hlsl_ir_node
{
    /* Item entry for storing the instruction in a list of instructions. */
    struct list entry;

    /* Type of node, which means that a pointer to this struct hlsl_ir_node can be cast to a
     * pointer to the struct with the same name. */
    enum hlsl_ir_node_type type;
    /* HLSL data type of the node, when used by other nodes as a source (through an hlsl_src).
     * HLSL_IR_CONSTANT, HLSL_IR_EXPR, HLSL_IR_LOAD, HLSL_IR_RESOURCE_LOAD, and HLSL_IR_SWIZZLE
     * have a data type and can be used through an hlsl_src; other types of node don't. */
    struct hlsl_type *data_type;

    /* List containing all the struct hlsl_src·s that point to this node; linked by the
     * hlsl_src.entry fields. */
    struct list uses;

    struct vkd3d_shader_location loc;

    /* Liveness ranges. "index" is the index of this instruction. Since this is
     * essentially an SSA value, the earliest live point is the index. This is
     * true even for loops, since currently we can't have a reference to a
     * value generated in an earlier iteration of the loop. */
    unsigned int index, last_read;
    /* Temp. register allocated to store the result of this instruction (if any). */
    struct hlsl_reg reg;
};

struct hlsl_block
{
    /* List containing instruction nodes; linked by the hlsl_ir_node.entry fields. */
    struct list instrs;
};

/* A reference to an instruction node (struct hlsl_ir_node), usable as a field in other structs.
 * struct hlsl_src is more powerful than a mere pointer to an hlsl_ir_node because it also
 * contains a linked list item entry, which is used by the referenced instruction node to keep
 * track of all the hlsl_src·s that reference it.
 * This allows replacing any hlsl_ir_node with any other in all the places it is used, or checking
 * that a node has no uses before it is removed. */
struct hlsl_src
{
    struct hlsl_ir_node *node;
    /* Item entry for node->uses. */
    struct list entry;
};
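
/* A sketch of the intended use (helper names assumed here, not defined in this
 * excerpt): a node records every hlsl_src that points at it, so replacing it
 * everywhere, or verifying it is unused, is just a walk over node->uses.
 *
 *     struct hlsl_src src = {0};
 *     hlsl_src_from_node(&src, instr);   // links src into instr->uses
 *     ...
 *     hlsl_src_remove(&src);             // unlinks it again
 */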

struct hlsl_attribute
{
    const char *name;
    struct hlsl_block instrs;
    struct vkd3d_shader_location loc;
    unsigned int args_count;
    struct hlsl_src args[];
};

#define HLSL_STORAGE_EXTERN              0x00000001
#define HLSL_STORAGE_NOINTERPOLATION     0x00000002
#define HLSL_MODIFIER_PRECISE            0x00000004
#define HLSL_STORAGE_SHARED              0x00000008
#define HLSL_STORAGE_GROUPSHARED         0x00000010
#define HLSL_STORAGE_STATIC              0x00000020
#define HLSL_STORAGE_UNIFORM             0x00000040
#define HLSL_MODIFIER_VOLATILE           0x00000080
#define HLSL_MODIFIER_CONST              0x00000100
#define HLSL_MODIFIER_ROW_MAJOR          0x00000200
#define HLSL_MODIFIER_COLUMN_MAJOR       0x00000400
#define HLSL_STORAGE_IN                  0x00000800
#define HLSL_STORAGE_OUT                 0x00001000
#define HLSL_MODIFIER_INLINE             0x00002000
#define HLSL_STORAGE_CENTROID            0x00004000
#define HLSL_STORAGE_NOPERSPECTIVE       0x00008000
#define HLSL_STORAGE_LINEAR              0x00010000
#define HLSL_MODIFIER_SINGLE             0x00020000
#define HLSL_MODIFIER_EXPORT             0x00040000
#define HLSL_STORAGE_ANNOTATION          0x00080000

#define HLSL_TYPE_MODIFIERS_MASK     (HLSL_MODIFIER_PRECISE | HLSL_MODIFIER_VOLATILE | \
                                      HLSL_MODIFIER_CONST | HLSL_MODIFIER_ROW_MAJOR | \
                                      HLSL_MODIFIER_COLUMN_MAJOR)

#define HLSL_INTERPOLATION_MODIFIERS_MASK (HLSL_STORAGE_NOINTERPOLATION | HLSL_STORAGE_CENTROID | \
                                           HLSL_STORAGE_NOPERSPECTIVE | HLSL_STORAGE_LINEAR)

#define HLSL_MODIFIERS_MAJORITY_MASK (HLSL_MODIFIER_ROW_MAJOR | HLSL_MODIFIER_COLUMN_MAJOR)

#define HLSL_ARRAY_ELEMENTS_COUNT_IMPLICIT 0

/* Reservation of a register and/or an offset for objects inside constant buffers, to be used as a
 * starting point of their allocation. They are available through the register(·) and the
 * packoffset(·) syntaxes, respectively.
 * The constant buffer offset is measured in register components. */
struct hlsl_reg_reservation
{
    char reg_type;
    unsigned int reg_space, reg_index;

    char offset_type;
    unsigned int offset_index;
};
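
/* For example (illustrative values; the exact encoding is whatever the parser
 * stores in this struct), declarations such as
 *
 *     float4 value : packoffset(c2.y);
 *     Texture2D tex : register(t3, space1);
 *
 * would yield, respectively, a reservation with offset_type == 'c' and
 * offset_index == 9 (2 registers * 4 + 1 component), and a reservation with
 * reg_type == 't', reg_space == 1, reg_index == 3. */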

union hlsl_constant_value_component
{
    uint32_t u;
    int32_t i;
    float f;
    double d;
};

struct hlsl_ir_var
{
    struct hlsl_type *data_type;
    struct vkd3d_shader_location loc;
    const char *name;
    struct hlsl_semantic semantic;
    /* Buffer where the variable's value is stored, in case it is uniform. */
    struct hlsl_buffer *buffer;
    /* Bitfield for storage modifiers (type modifiers are stored in data_type->modifiers). */
    uint32_t storage_modifiers;
    /* Optional reservations of registers and/or offsets for variables within constant buffers. */
    struct hlsl_reg_reservation reg_reservation;

    /* Item entry in hlsl_scope.vars. Specifically hlsl_ctx.globals.vars if the variable is global. */
    struct list scope_entry;
    /* Item entry in hlsl_ctx.extern_vars, if the variable is extern. */
    struct list extern_entry;
    /* Scope that the variable itself defines, used to provide a container for techniques and passes. */
    struct hlsl_scope *scope;
    /* Scope that contains annotations for this variable. */
    struct hlsl_scope *annotations;

    /* Array of default values the variable was initialized with, one for each component.
     * Only for variables that need it, such as uniforms and variables inside constant buffers.
     * This pointer is NULL for others. */
    struct hlsl_default_value
    {
        /* Default value, in case the component is a string, otherwise it is NULL. */
        const char *string;
        /* Default value, in case the component is a numeric value. */
        union hlsl_constant_value_component number;
    } *default_values;

    /* A dynamic array containing the state block on the variable's declaration, if any.
     * An array variable may contain multiple state blocks.
     * A technique pass will always contain one.
     * These are only really used for effect profiles. */
    struct hlsl_state_block **state_blocks;
    unsigned int state_block_count;
    size_t state_block_capacity;

    /* Indexes of the IR instructions where the variable is first written and last read (liveness
     * range). The IR instructions are numbered starting from 2, because 0 means unused, and 1
     * means function entry. */
    unsigned int first_write, last_read;
    /* Offset where the variable's value is stored within its buffer in numeric register components.
     * Only used in case the variable is uniform. */
    unsigned int buffer_offset;
    /* Register to which the variable is allocated during its lifetime, for each register set.
     * In case the variable spans multiple registers in one regset, this is set to the
     * start of the register range.
     * Builtin semantics don't use the field.
     * In SM4, uniforms don't use the field because they are located using the buffer's hlsl_reg
     * and the buffer_offset instead. */
    struct hlsl_reg regs[HLSL_REGSET_LAST + 1];

    struct
    {
        bool used;
        enum hlsl_sampler_dim sampler_dim;
        struct vkd3d_shader_location first_sampler_dim_loc;
    } *objects_usage[HLSL_REGSET_LAST_OBJECT + 1];
    /* Minimum number of binds required to include all components actually used in the shader.
     * It may be less than the allocation size, e.g. for texture arrays.
     * The bind_count for HLSL_REGSET_NUMERIC is only used in uniforms for now. */
    unsigned int bind_count[HLSL_REGSET_LAST + 1];

    /* Whether the shader performs dereferences with non-constant offsets in the variable. */
    bool indexable;

    uint32_t is_input_semantic : 1;
    uint32_t is_output_semantic : 1;
    uint32_t is_uniform : 1;
    uint32_t is_param : 1;
    uint32_t is_separated_resource : 1;
    uint32_t is_synthetic : 1;
    uint32_t has_explicit_bind_point : 1;
};

/* This struct is used to represent assignments in state block entries:
 *     name = {args[0], args[1], ...};
 *     - or -
 *     name = args[0]
 *     - or -
 *     name[lhs_index] = args[0]
 *     - or -
 *     name[lhs_index] = {args[0], args[1], ...};
 *
 * This struct also represents function call syntax:
 *     name(args[0], args[1], ...)
 */
struct hlsl_state_block_entry
{
    /* Whether this entry is a function call. */
    bool is_function_call;

    /* For assignments, the name in the lhs.
     * For functions, the name of the function. */
    char *name;
    /* Resolved format-specific property identifier. */
    unsigned int name_id;

    /* For assignments, whether the lhs of an assignment is indexed and, in
     * that case, its index. */
    bool lhs_has_index;
    unsigned int lhs_index;

    /* Instructions present in the rhs or the function arguments. */
    struct hlsl_block *instrs;

    /* For assignments, arguments of the rhs initializer.
     * For function calls, the arguments themselves. */
    struct hlsl_src *args;
    unsigned int args_count;
};

struct hlsl_state_block
{
    struct hlsl_state_block_entry **entries;
    size_t count, capacity;
};
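
/* For instance, a pass description such as
 *
 *     pass p0
 *     {
 *         AlphaBlendEnable = TRUE;
 *         VertexShader = compile vs_2_0 vs_main();
 *     }
 *
 * would roughly yield a state block with one assignment entry per line: the
 * first with name "AlphaBlendEnable" and a single argument, the second with the
 * compiled shader as its argument. The shapes described here are illustrative,
 * not verified compiler output. */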

/* Sized array of variables representing a function's parameters. */
struct hlsl_func_parameters
{
    struct hlsl_ir_var **vars;
    size_t count, capacity;
};

struct hlsl_ir_function
{
    /* Item entry in hlsl_ctx.functions. */
    struct rb_entry entry;

    const char *name;
    /* List containing the function definitions, stored as hlsl_ir_function_decl structures, of
     * which there may be more than one in case of function overloading. */
    struct list overloads;
};

struct hlsl_ir_function_decl
{
    struct hlsl_type *return_type;
    /* Synthetic variable used to store the return value of the function. */
    struct hlsl_ir_var *return_var;

    struct vkd3d_shader_location loc;
    /* Item entry in hlsl_ir_function.overloads. */
    struct list entry;

    /* Function to which this declaration corresponds. */
    struct hlsl_ir_function *func;

    struct hlsl_func_parameters parameters;

    struct hlsl_block body;
    bool has_body;
    /* Array of attributes (like numthreads) specified just before the function declaration.
     * Not to be confused with the function parameters! */
    unsigned int attr_count;
    const struct hlsl_attribute *const *attrs;

    /* Synthetic boolean variable marking whether a return statement has been
     * executed. Needed to deal with return statements in non-uniform control
     * flow, since some backends can't handle them. */
    struct hlsl_ir_var *early_return_var;
};

struct hlsl_ir_call
{
    struct hlsl_ir_node node;
    struct hlsl_ir_function_decl *decl;
};

struct hlsl_ir_if
{
    struct hlsl_ir_node node;
    struct hlsl_src condition;
    struct hlsl_block then_block;
    struct hlsl_block else_block;
};

enum hlsl_ir_loop_unroll_type
{
    HLSL_IR_LOOP_UNROLL,
    HLSL_IR_LOOP_FORCE_UNROLL,
    HLSL_IR_LOOP_FORCE_LOOP
};

struct hlsl_ir_loop
{
    struct hlsl_ir_node node;
    /* loop condition is stored in the body (as "if (!condition) break;") */
    struct hlsl_block body;
    unsigned int next_index; /* liveness index of the end of the loop */
    unsigned int unroll_limit;
    enum hlsl_ir_loop_unroll_type unroll_type;
};

struct hlsl_ir_switch_case
{
    unsigned int value;
    bool is_default;
    struct hlsl_block body;
    struct list entry;
    struct vkd3d_shader_location loc;
};

struct hlsl_ir_switch
{
    struct hlsl_ir_node node;
    struct hlsl_src selector;
    struct list cases;
};

enum hlsl_ir_expr_op
{
    HLSL_OP0_VOID,
    HLSL_OP0_RASTERIZER_SAMPLE_COUNT,

    HLSL_OP1_ABS,
    HLSL_OP1_BIT_NOT,
    HLSL_OP1_CAST,
    HLSL_OP1_CEIL,
    HLSL_OP1_COS,
    HLSL_OP1_COS_REDUCED,    /* Reduced range [-pi, pi], writes to .x */
    HLSL_OP1_DSX,
    HLSL_OP1_DSX_COARSE,
    HLSL_OP1_DSX_FINE,
    HLSL_OP1_DSY,
    HLSL_OP1_DSY_COARSE,
    HLSL_OP1_DSY_FINE,
    HLSL_OP1_EXP2,
    HLSL_OP1_F16TOF32,
    HLSL_OP1_FLOOR,
    HLSL_OP1_FRACT,
    HLSL_OP1_LOG2,
    HLSL_OP1_LOGIC_NOT,
    HLSL_OP1_NEG,
    HLSL_OP1_NRM,
    HLSL_OP1_RCP,
    HLSL_OP1_REINTERPRET,
    HLSL_OP1_ROUND,
    HLSL_OP1_RSQ,
    HLSL_OP1_SAT,
    HLSL_OP1_SIGN,
    HLSL_OP1_SIN,
    HLSL_OP1_SIN_REDUCED,    /* Reduced range [-pi, pi], writes to .y */
    HLSL_OP1_SQRT,
    HLSL_OP1_TRUNC,

    HLSL_OP2_ADD,
    HLSL_OP2_BIT_AND,
    HLSL_OP2_BIT_OR,
    HLSL_OP2_BIT_XOR,
    HLSL_OP2_CRS,
    HLSL_OP2_DIV,
    HLSL_OP2_DOT,
    HLSL_OP2_EQUAL,
    HLSL_OP2_GEQUAL,
    HLSL_OP2_LESS,
    HLSL_OP2_LOGIC_AND,
    HLSL_OP2_LOGIC_OR,
    HLSL_OP2_LSHIFT,
    HLSL_OP2_MAX,
    HLSL_OP2_MIN,
    HLSL_OP2_MOD,
    HLSL_OP2_MUL,
    HLSL_OP2_NEQUAL,
    HLSL_OP2_RSHIFT,
    /* SLT(a, b) retrieves 1.0 if (a < b), else 0.0. Only used for SM1-SM3 target vertex shaders. */
    HLSL_OP2_SLT,

    /* DP2ADD(a, b, c) computes the scalar product of a.xy and b.xy,
     * then adds c. */
    HLSL_OP3_DP2ADD,
    /* TERNARY(a, b, c) returns 'b' if 'a' is true and 'c' otherwise. 'a' must always be boolean.
     * CMP(a, b, c) returns 'b' if 'a' >= 0, and 'c' otherwise. It's used only for SM1-SM3 targets. */
    HLSL_OP3_CMP,
    HLSL_OP3_TERNARY,
    HLSL_OP3_MAD,
};

#define HLSL_MAX_OPERANDS 3

struct hlsl_ir_expr
{
    struct hlsl_ir_node node;
    enum hlsl_ir_expr_op op;
    struct hlsl_src operands[HLSL_MAX_OPERANDS];
};

enum hlsl_ir_jump_type
{
    HLSL_IR_JUMP_BREAK,
    HLSL_IR_JUMP_CONTINUE,
    HLSL_IR_JUMP_DISCARD_NEG,
    HLSL_IR_JUMP_DISCARD_NZ,
    HLSL_IR_JUMP_RETURN,
    /* The UNRESOLVED_CONTINUE type is used by the parser when a 'continue' statement is found.
     * It never reaches code generation, and is resolved to the CONTINUE type once iteration
     * and loop exit logic have been properly applied. */
    HLSL_IR_JUMP_UNRESOLVED_CONTINUE,
};

struct hlsl_ir_jump
{
    struct hlsl_ir_node node;
    enum hlsl_ir_jump_type type;
    /* Argument used for HLSL_IR_JUMP_DISCARD_NZ and HLSL_IR_JUMP_DISCARD_NEG. */
    struct hlsl_src condition;
};

struct hlsl_ir_swizzle
{
    struct hlsl_ir_node node;
    struct hlsl_src val;
    uint32_t swizzle;
};

struct hlsl_ir_index
{
    struct hlsl_ir_node node;
    struct hlsl_src val, idx;
};

/* Reference to a variable, or a part of it (e.g. a vector within a matrix within a struct). */
struct hlsl_deref
{
    struct hlsl_ir_var *var;

    /* An array of references to instruction nodes, of data type uint, that are used to reach the
     * desired part of the variable.
     * If path_len is 0, then this is a reference to the whole variable.
     * The value of each instruction node in the path corresponds to the index of the element/field
     * that has to be selected on each nesting level to reach this part.
     * The path shall not contain additional values once a type that cannot be subdivided
     * (a.k.a. "component") is reached. */
    unsigned int path_len;
    struct hlsl_src *path;

    /* Before writing the bytecode, deref paths are lowered into an offset (within the pertaining
     * regset) from the start of the variable, to the part of the variable that is referenced.
     * This offset is stored using two fields, one for a variable part and another for a constant
     * part, which are added together:
     * - rel_offset: An offset given by an instruction node, in whole registers.
     * - const_offset: A constant number of register components.
     * Since the type information can no longer be retrieved from the offset alone, the type is
     * stored in the data_type field, which remains NULL if the deref hasn't been lowered yet. */
    struct hlsl_src rel_offset;
    unsigned int const_offset;
    struct hlsl_type *data_type;
};
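
/* An illustrative example (indices assumed): given
 *
 *     struct { float4 a; float4 b[3]; } s;
 *
 * a load of s.b[2] would have var pointing at "s" and a two-step path whose
 * nodes hold the constant values 1 (field "b") and 2 (the array element), while
 * a load of the whole of "s" would simply have path_len == 0. */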

/* Whether the path has been lowered to an offset or not. */
static inline bool hlsl_deref_is_lowered(const struct hlsl_deref *deref)
{
    return !!deref->data_type;
}

struct hlsl_ir_load
{
    struct hlsl_ir_node node;
    struct hlsl_deref src;
};

enum hlsl_resource_load_type
{
    HLSL_RESOURCE_LOAD,
    HLSL_RESOURCE_SAMPLE,
    HLSL_RESOURCE_SAMPLE_CMP,
    HLSL_RESOURCE_SAMPLE_CMP_LZ,
    HLSL_RESOURCE_SAMPLE_LOD,
    HLSL_RESOURCE_SAMPLE_LOD_BIAS,
    HLSL_RESOURCE_SAMPLE_GRAD,
    HLSL_RESOURCE_SAMPLE_PROJ,
    HLSL_RESOURCE_GATHER_RED,
    HLSL_RESOURCE_GATHER_GREEN,
    HLSL_RESOURCE_GATHER_BLUE,
    HLSL_RESOURCE_GATHER_ALPHA,
    HLSL_RESOURCE_SAMPLE_INFO,
    HLSL_RESOURCE_RESINFO,
};

struct hlsl_ir_resource_load
{
    struct hlsl_ir_node node;
    enum hlsl_resource_load_type load_type;
    struct hlsl_deref resource, sampler;
    struct hlsl_src coords, lod, ddx, ddy, cmp, sample_index, texel_offset;
    enum hlsl_sampler_dim sampling_dim;
};

struct hlsl_ir_resource_store
{
    struct hlsl_ir_node node;
    struct hlsl_deref resource;
    struct hlsl_src coords, value;
};

struct hlsl_ir_store
{
    struct hlsl_ir_node node;
    struct hlsl_deref lhs;
    struct hlsl_src rhs;
    unsigned char writemask;
};

struct hlsl_ir_constant
{
    struct hlsl_ir_node node;
    struct hlsl_constant_value
    {
        union hlsl_constant_value_component u[4];
    } value;
    /* Constant register of type 'c' where the constant value is stored for SM1. */
    struct hlsl_reg reg;
};

struct hlsl_ir_string_constant
{
    struct hlsl_ir_node node;
    char *string;
};

/* Stateblock constants are undeclared values found in state blocks or technique pass descriptions;
 * they do not concern regular pixel, vertex, or compute shaders, except for parsing. */
struct hlsl_ir_stateblock_constant
{
    struct hlsl_ir_node node;
    char *name;
};

struct hlsl_scope
{
    /* Item entry for hlsl_ctx.scopes. */
    struct list entry;

    /* List containing the variables declared in this scope; linked by hlsl_ir_var->scope_entry. */
    struct list vars;
    /* Tree map containing the types declared in this scope, using hlsl_type.name as the key.
     * The types are attached through the hlsl_type.scope_entry fields. */
    struct rb_tree types;
    /* Scope containing this scope. This value is NULL for the global scope. */
    struct hlsl_scope *upper;
    /* The scope was created for a loop statement. */
    bool loop;
    /* The scope was created for a switch statement. */
    bool _switch;
    /* The scope contains annotation variables. */
    bool annotations;
};

struct hlsl_profile_info
{
    const char *name;
    enum vkd3d_shader_type type;
    unsigned int major_version;
    unsigned int minor_version;
    unsigned int major_level;
    unsigned int minor_level;
    bool software;
};

struct hlsl_vec4
{
    float f[4];
};

enum hlsl_buffer_type
{
    HLSL_BUFFER_CONSTANT,
    HLSL_BUFFER_TEXTURE,
};

/* In SM4, uniform variables are organized in different buffers. Besides buffers defined in the
 * source code, there is also the implicit $Globals buffer and the implicit $Params buffer,
 * to which uniform globals and parameters belong by default. */
struct hlsl_buffer
{
    struct vkd3d_shader_location loc;
    enum hlsl_buffer_type type;
    const char *name;
    uint32_t modifiers;
    /* Register reserved for this buffer, if any.
     * If provided, it should be of type 'b' if type is HLSL_BUFFER_CONSTANT and 't' if type is
     * HLSL_BUFFER_TEXTURE. */
    struct hlsl_reg_reservation reservation;
    /* Scope that contains annotations for this buffer. */
    struct hlsl_scope *annotations;
    /* Item entry for hlsl_ctx.buffers */
    struct list entry;

    /* The size of the buffer (in register components), and the size of the buffer as determined
     * by its last variable that's actually used. */
    unsigned size, used_size;
    /* Register of type 'b' on which the buffer is allocated. */
    struct hlsl_reg reg;

    bool manually_packed_elements;
    bool automatically_packed_elements;
};

struct hlsl_ctx
{
    const struct hlsl_profile_info *profile;

    const char **source_files;
    unsigned int source_files_count;
    /* Current location being read in the HLSL source, updated while parsing. */
    struct vkd3d_shader_location location;
    /* Stores the logging messages and logging configuration. */
    struct vkd3d_shader_message_context *message_context;
    /* Cache for temporary string allocations. */
    struct vkd3d_string_buffer_cache string_buffers;
    /* A value from enum vkd3d_result with the current success/failure result of the whole
     * compilation.
     * It is initialized to VKD3D_OK and set to an error code in case a call to hlsl_fixme() or
     * hlsl_error() is triggered, or in case of a memory allocation error.
     * The value of this field is checked between compilation stages to stop execution in case of
     * failure. */
    int result;

    /* Pointer to an opaque data structure managed by FLEX (during lexing), that encapsulates the
     * current state of the scanner. This pointer is required by all FLEX API functions when the
     * scanner is declared as reentrant, which is the case. */
    void *scanner;

    /* Pointer to the current scope; changes as the parser reads the code. */
    struct hlsl_scope *cur_scope;
    /* Scope of global variables. */
    struct hlsl_scope *globals;
    /* Dummy scope for variables which should never be looked up by name. */
    struct hlsl_scope *dummy_scope;
    /* List of all the scopes in the program; linked by the hlsl_scope.entry fields. */
    struct list scopes;
    /* List of all the extern variables; linked by the hlsl_ir_var.extern_entry fields.
     * This exists as a convenience because it is often necessary to iterate all extern variables,
     * and these can be declared in global scope, as function parameters, or as the function
     * return value. */
    struct list extern_vars;

    /* List containing both the built-in HLSL buffers ($Globals and $Params) and the ones declared
     * in the shader; linked by the hlsl_buffer.entry fields. */
    struct list buffers;
    /* Current buffer (changes as the parser reads the code), $Globals buffer, and $Params buffer,
     * respectively. */
    struct hlsl_buffer *cur_buffer, *globals_buffer, *params_buffer;
    /* List containing all created hlsl_types, except builtin_types; linked by the hlsl_type.entry
     * fields. */
    struct list types;
    /* Tree map for the declared functions, using hlsl_ir_function.name as key.
     * The functions are attached through the hlsl_ir_function.entry fields. */
    struct rb_tree functions;
    /* Pointer to the current function; changes as the parser reads the code. */
    const struct hlsl_ir_function_decl *cur_function;

    /* Counter for generating unique internal variable names. */
    unsigned int internal_name_counter;

    /* Default matrix majority for matrix types. Can be set by a pragma within the HLSL source. */
    unsigned int matrix_majority;

    /* Basic data types stored for convenience. */
    struct
    {
        struct hlsl_type *scalar[HLSL_TYPE_LAST_SCALAR + 1];
        struct hlsl_type *vector[HLSL_TYPE_LAST_SCALAR + 1][4];
        /* matrix[HLSL_TYPE_FLOAT][1][3] is a float4x2, i.e. dimx = 2, dimy = 4. */
        struct hlsl_type *matrix[HLSL_TYPE_LAST_SCALAR + 1][4][4];
        struct hlsl_type *sampler[HLSL_SAMPLER_DIM_LAST_SAMPLER + 1];
        struct hlsl_type *string;
        struct hlsl_type *Void;
        struct hlsl_type *null;
    } builtin_types;

    /* List of the instruction nodes for initializing static variables. */
    struct hlsl_block static_initializers;

    /* Dynamic array of constant values that appear in the shader, associated with the 'c'
     * registers. Only used for SM1 profiles. */
    struct hlsl_constant_defs
    {
        struct hlsl_constant_register
        {
            uint32_t index;
            struct hlsl_vec4 value;
        } *regs;
        size_t count, size;
    } constant_defs;

    /* 'c' registers where the constants expected by SM2 sincos are stored. */
    struct hlsl_reg d3dsincosconst1, d3dsincosconst2;
    /* Number of temp. registers required for the shader to run, i.e. the largest temp register
     * index that will be used in the output bytecode (+1). */
    uint32_t temp_count;

    /* Number of threads to be executed (on the X, Y, and Z dimensions) in a single thread group in
     * compute shader profiles. It is set using the numthreads() attribute in the entry point. */
    uint32_t thread_count[3];

    /* In some cases we generate opcodes by parsing an HLSL function and then
     * invoking it. If not NULL, this field is the name of the function that we
     * are currently parsing, "mangled" with an internal prefix to avoid
     * polluting the user namespace. */
    const char *internal_func_name;

    /* Whether the parser is inside a state block (effects' metadata) inside a variable declaration. */
    uint32_t in_state_block : 1;
    /* Whether the numthreads() attribute has been provided in the entry-point function. */
    uint32_t found_numthreads : 1;

    bool semantic_compat_mapping;
    bool child_effect;
    bool include_empty_buffers;
    bool warn_implicit_truncation;
};
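
/* How the "result" field typically gets set and then checked between stages; a
 * minimal, illustrative sketch only (the surrounding control flow is hypothetical,
 * and "error" stands for some VKD3D_SHADER_ERROR_HLSL_* value):
 *
 *     hlsl_error(ctx, &instr->loc, error, "Invalid argument.");   // sets ctx->result
 *     ...
 *     if (ctx->result < 0)         // VKD3D_OK is 0; error codes are negative
 *         return ctx->result;      // stop before running the next compilation stage
 */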

static inline bool hlsl_version_ge(const struct hlsl_ctx *ctx, unsigned int major, unsigned int minor)
{
    return ctx->profile->major_version > major
            || (ctx->profile->major_version == major && ctx->profile->minor_version >= minor);
}

static inline bool hlsl_version_lt(const struct hlsl_ctx *ctx, unsigned int major, unsigned int minor)
{
    return !hlsl_version_ge(ctx, major, minor);
}
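
/* Typical use: gating behaviour on the target profile. Illustrative sketch only;
 * "loc" is assumed to be a valid source location:
 *
 *     if (hlsl_version_lt(ctx, 4, 0))
 *         hlsl_fixme(ctx, &loc, "SM%u.%u support for this feature.",
 *                 ctx->profile->major_version, ctx->profile->minor_version);
 */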

struct hlsl_resource_load_params
{
    struct hlsl_type *format;
    enum hlsl_resource_load_type type;
    struct hlsl_ir_node *resource, *sampler;
    struct hlsl_ir_node *coords, *lod, *ddx, *ddy, *cmp, *sample_index, *texel_offset;
    enum hlsl_sampler_dim sampling_dim;
};

static inline struct hlsl_ir_call *hlsl_ir_call(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_CALL);
    return CONTAINING_RECORD(node, struct hlsl_ir_call, node);
}

static inline struct hlsl_ir_constant *hlsl_ir_constant(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_CONSTANT);
    return CONTAINING_RECORD(node, struct hlsl_ir_constant, node);
}

static inline struct hlsl_ir_string_constant *hlsl_ir_string_constant(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_STRING_CONSTANT);
    return CONTAINING_RECORD(node, struct hlsl_ir_string_constant, node);
}

static inline struct hlsl_ir_expr *hlsl_ir_expr(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_EXPR);
    return CONTAINING_RECORD(node, struct hlsl_ir_expr, node);
}

static inline struct hlsl_ir_if *hlsl_ir_if(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_IF);
    return CONTAINING_RECORD(node, struct hlsl_ir_if, node);
}

static inline struct hlsl_ir_jump *hlsl_ir_jump(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_JUMP);
    return CONTAINING_RECORD(node, struct hlsl_ir_jump, node);
}

static inline struct hlsl_ir_load *hlsl_ir_load(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_LOAD);
    return CONTAINING_RECORD(node, struct hlsl_ir_load, node);
}

static inline struct hlsl_ir_loop *hlsl_ir_loop(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_LOOP);
    return CONTAINING_RECORD(node, struct hlsl_ir_loop, node);
}

static inline struct hlsl_ir_resource_load *hlsl_ir_resource_load(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_RESOURCE_LOAD);
    return CONTAINING_RECORD(node, struct hlsl_ir_resource_load, node);
}

static inline struct hlsl_ir_resource_store *hlsl_ir_resource_store(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_RESOURCE_STORE);
    return CONTAINING_RECORD(node, struct hlsl_ir_resource_store, node);
}

static inline struct hlsl_ir_store *hlsl_ir_store(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_STORE);
    return CONTAINING_RECORD(node, struct hlsl_ir_store, node);
}

static inline struct hlsl_ir_swizzle *hlsl_ir_swizzle(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_SWIZZLE);
    return CONTAINING_RECORD(node, struct hlsl_ir_swizzle, node);
}

static inline struct hlsl_ir_index *hlsl_ir_index(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_INDEX);
    return CONTAINING_RECORD(node, struct hlsl_ir_index, node);
}

static inline struct hlsl_ir_switch *hlsl_ir_switch(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_SWITCH);
    return CONTAINING_RECORD(node, struct hlsl_ir_switch, node);
}

static inline struct hlsl_ir_stateblock_constant *hlsl_ir_stateblock_constant(const struct hlsl_ir_node *node)
{
    VKD3D_ASSERT(node->type == HLSL_IR_STATEBLOCK_CONSTANT);
    return CONTAINING_RECORD(node, struct hlsl_ir_stateblock_constant, node);
}
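
/* The helpers above downcast a generic hlsl_ir_node to its concrete type, asserting
 * on node->type first. A minimal, illustrative use while walking a block:
 *
 *     struct hlsl_ir_node *instr;
 *
 *     LIST_FOR_EACH_ENTRY(instr, &block->instrs, struct hlsl_ir_node, entry)
 *     {
 *         if (instr->type == HLSL_IR_CONSTANT)
 *         {
 *             struct hlsl_ir_constant *constant = hlsl_ir_constant(instr);
 *             ...
 *         }
 *     }
 */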

static inline void hlsl_block_init(struct hlsl_block *block)
{
    list_init(&block->instrs);
}

static inline void hlsl_block_add_instr(struct hlsl_block *block, struct hlsl_ir_node *instr)
{
    list_add_tail(&block->instrs, &instr->entry);
}

static inline void hlsl_block_add_block(struct hlsl_block *block, struct hlsl_block *add)
{
    list_move_tail(&block->instrs, &add->instrs);
}
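
/* A minimal, illustrative sketch of how passes typically build up a block of
 * instructions, assuming "ctx", "loc" and "dst_block" are in scope (allocation
 * failures ignored for brevity):
 *
 *     struct hlsl_block block;
 *     struct hlsl_ir_node *zero;
 *
 *     hlsl_block_init(&block);
 *     zero = hlsl_new_uint_constant(ctx, 0, &loc);
 *     hlsl_block_add_instr(&block, zero);
 *     hlsl_block_add_block(dst_block, &block);    // splice everything into dst_block
 */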

static inline void hlsl_src_from_node(struct hlsl_src *src, struct hlsl_ir_node *node)
{
    src->node = node;
    if (node)
        list_add_tail(&node->uses, &src->entry);
}

static inline void hlsl_src_remove(struct hlsl_src *src)
{
    if (src->node)
        list_remove(&src->entry);
    src->node = NULL;
}
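
/* hlsl_src keeps def-use information consistent: setting a source records the use on
 * the node's "uses" list, and removing it unlinks that record again. Illustrative
 * sketch only:
 *
 *     struct hlsl_src src = {0};
 *
 *     hlsl_src_from_node(&src, node);   // "node" now has one more recorded use
 *     ...
 *     hlsl_src_remove(&src);            // drop the use before replacing or freeing "node"
 */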

static inline void *hlsl_alloc(struct hlsl_ctx *ctx, size_t size)
{
    void *ptr = vkd3d_calloc(1, size);

    if (!ptr)
        ctx->result = VKD3D_ERROR_OUT_OF_MEMORY;
    return ptr;
}

static inline void *hlsl_calloc(struct hlsl_ctx *ctx, size_t count, size_t size)
{
    void *ptr = vkd3d_calloc(count, size);

    if (!ptr)
        ctx->result = VKD3D_ERROR_OUT_OF_MEMORY;
    return ptr;
}

static inline void *hlsl_realloc(struct hlsl_ctx *ctx, void *ptr, size_t size)
{
    void *ret = vkd3d_realloc(ptr, size);

    if (!ret)
        ctx->result = VKD3D_ERROR_OUT_OF_MEMORY;
    return ret;
}

static inline char *hlsl_strdup(struct hlsl_ctx *ctx, const char *string)
{
    char *ptr = vkd3d_strdup(string);

    if (!ptr)
        ctx->result = VKD3D_ERROR_OUT_OF_MEMORY;
    return ptr;
}

static inline bool hlsl_array_reserve(struct hlsl_ctx *ctx, void **elements,
        size_t *capacity, size_t element_count, size_t element_size)
{
    bool ret = vkd3d_array_reserve(elements, capacity, element_count, element_size);

    if (!ret)
        ctx->result = VKD3D_ERROR_OUT_OF_MEMORY;
    return ret;
}

static inline struct vkd3d_string_buffer *hlsl_get_string_buffer(struct hlsl_ctx *ctx)
{
    struct vkd3d_string_buffer *ret = vkd3d_string_buffer_get(&ctx->string_buffers);

    if (!ret)
        ctx->result = VKD3D_ERROR_OUT_OF_MEMORY;
    return ret;
}

static inline void hlsl_release_string_buffer(struct hlsl_ctx *ctx, struct vkd3d_string_buffer *buffer)
{
    vkd3d_string_buffer_release(&ctx->string_buffers, buffer);
}
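
/* Temporary strings come from the context's string-buffer cache; the usual pattern
 * (illustrative only) is get, print, use, release:
 *
 *     struct vkd3d_string_buffer *string;
 *
 *     if ((string = hlsl_get_string_buffer(ctx)))
 *     {
 *         vkd3d_string_buffer_printf(string, "%s[%u]", var->name, index);
 *         ...
 *         hlsl_release_string_buffer(ctx, string);
 *     }
 */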

static inline struct hlsl_type *hlsl_get_scalar_type(const struct hlsl_ctx *ctx, enum hlsl_base_type base_type)
{
    return ctx->builtin_types.scalar[base_type];
}

static inline struct hlsl_type *hlsl_get_vector_type(const struct hlsl_ctx *ctx, enum hlsl_base_type base_type,
        unsigned int dimx)
{
    return ctx->builtin_types.vector[base_type][dimx - 1];
}

static inline struct hlsl_type *hlsl_get_matrix_type(const struct hlsl_ctx *ctx, enum hlsl_base_type base_type,
        unsigned int dimx, unsigned int dimy)
{
    return ctx->builtin_types.matrix[base_type][dimx - 1][dimy - 1];
}

static inline struct hlsl_type *hlsl_get_numeric_type(const struct hlsl_ctx *ctx, enum hlsl_type_class type,
        enum hlsl_base_type base_type, unsigned int dimx, unsigned int dimy)
{
    if (type == HLSL_CLASS_SCALAR)
        return hlsl_get_scalar_type(ctx, base_type);
    else if (type == HLSL_CLASS_VECTOR)
        return hlsl_get_vector_type(ctx, base_type, dimx);
    else
        return hlsl_get_matrix_type(ctx, base_type, dimx, dimy);
}
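
/* Following the matrix[][][] convention documented in hlsl_ctx.builtin_types, dimx is
 * the number of columns and dimy the number of rows, so for example (illustrative only):
 *
 *     struct hlsl_type *float4x2 = hlsl_get_matrix_type(ctx, HLSL_TYPE_FLOAT, 2, 4);
 *     struct hlsl_type *float3 = hlsl_get_vector_type(ctx, HLSL_TYPE_FLOAT, 3);
 */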

static inline bool hlsl_is_numeric_type(const struct hlsl_type *type)
{
    return type->class <= HLSL_CLASS_LAST_NUMERIC;
}

static inline unsigned int hlsl_sampler_dim_count(enum hlsl_sampler_dim dim)
{
    switch (dim)
    {
        case HLSL_SAMPLER_DIM_1D:
        case HLSL_SAMPLER_DIM_BUFFER:
        case HLSL_SAMPLER_DIM_STRUCTURED_BUFFER:
            return 1;
        case HLSL_SAMPLER_DIM_1DARRAY:
        case HLSL_SAMPLER_DIM_2D:
        case HLSL_SAMPLER_DIM_2DMS:
            return 2;
        case HLSL_SAMPLER_DIM_2DARRAY:
        case HLSL_SAMPLER_DIM_2DMSARRAY:
        case HLSL_SAMPLER_DIM_3D:
        case HLSL_SAMPLER_DIM_CUBE:
            return 3;
        case HLSL_SAMPLER_DIM_CUBEARRAY:
            return 4;
        default:
            vkd3d_unreachable();
    }
}
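
/* For example, a 2D array resource is addressed with (x, y, slice), so
 * hlsl_sampler_dim_count(HLSL_SAMPLER_DIM_2DARRAY) == 3, while a cube array needs a
 * direction vector plus an array index, i.e. 4 coordinate components. */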

static inline bool hlsl_var_has_buffer_offset_register_reservation(struct hlsl_ctx *ctx, const struct hlsl_ir_var *var)
{
    return var->reg_reservation.reg_type == 'c' && var->buffer == ctx->globals_buffer;
}

char *hlsl_sprintf_alloc(struct hlsl_ctx *ctx, const char *fmt, ...) VKD3D_PRINTF_FUNC(2, 3);

const char *debug_hlsl_expr_op(enum hlsl_ir_expr_op op);
const char *debug_hlsl_type(struct hlsl_ctx *ctx, const struct hlsl_type *type);
const char *debug_hlsl_writemask(unsigned int writemask);
const char *debug_hlsl_swizzle(unsigned int swizzle, unsigned int count);

struct vkd3d_string_buffer *hlsl_type_to_string(struct hlsl_ctx *ctx, const struct hlsl_type *type);
struct vkd3d_string_buffer *hlsl_component_to_string(struct hlsl_ctx *ctx, const struct hlsl_ir_var *var,
        unsigned int index);
struct vkd3d_string_buffer *hlsl_modifiers_to_string(struct hlsl_ctx *ctx, unsigned int modifiers);
const char *hlsl_node_type_to_string(enum hlsl_ir_node_type type);

struct hlsl_ir_node *hlsl_add_conditional(struct hlsl_ctx *ctx, struct hlsl_block *block,
        struct hlsl_ir_node *condition, struct hlsl_ir_node *if_true, struct hlsl_ir_node *if_false);
void hlsl_add_function(struct hlsl_ctx *ctx, char *name, struct hlsl_ir_function_decl *decl);
bool hlsl_add_var(struct hlsl_ctx *ctx, struct hlsl_ir_var *decl, bool local_var);

void hlsl_block_cleanup(struct hlsl_block *block);
bool hlsl_clone_block(struct hlsl_ctx *ctx, struct hlsl_block *dst_block, const struct hlsl_block *src_block);

void hlsl_dump_function(struct hlsl_ctx *ctx, const struct hlsl_ir_function_decl *func);
void hlsl_dump_var_default_values(const struct hlsl_ir_var *var);

bool hlsl_validate_state_block_entry(struct hlsl_ctx *ctx, struct hlsl_state_block_entry *entry,
        const struct vkd3d_shader_location *loc);
struct hlsl_state_block_entry *clone_stateblock_entry(struct hlsl_ctx *ctx,
        struct hlsl_state_block_entry *src, const char *name, bool lhs_has_index,
        unsigned int lhs_index, unsigned int arg_index);

void hlsl_run_const_passes(struct hlsl_ctx *ctx, struct hlsl_block *body);
int hlsl_emit_bytecode(struct hlsl_ctx *ctx, struct hlsl_ir_function_decl *entry_func,
        enum vkd3d_shader_target_type target_type, struct vkd3d_shader_code *out);
int hlsl_emit_effect_binary(struct hlsl_ctx *ctx, struct vkd3d_shader_code *out);

bool hlsl_init_deref_from_index_chain(struct hlsl_ctx *ctx, struct hlsl_deref *deref, struct hlsl_ir_node *chain);
bool hlsl_copy_deref(struct hlsl_ctx *ctx, struct hlsl_deref *deref, const struct hlsl_deref *other);

void hlsl_cleanup_deref(struct hlsl_deref *deref);

void hlsl_cleanup_semantic(struct hlsl_semantic *semantic);
bool hlsl_clone_semantic(struct hlsl_ctx *ctx, struct hlsl_semantic *dst, const struct hlsl_semantic *src);

void hlsl_cleanup_ir_switch_cases(struct list *cases);
void hlsl_free_ir_switch_case(struct hlsl_ir_switch_case *c);

void hlsl_replace_node(struct hlsl_ir_node *old, struct hlsl_ir_node *new);

void hlsl_free_attribute(struct hlsl_attribute *attr);
void hlsl_free_instr(struct hlsl_ir_node *node);
void hlsl_free_instr_list(struct list *list);
void hlsl_free_state_block(struct hlsl_state_block *state_block);
void hlsl_free_state_block_entry(struct hlsl_state_block_entry *state_block_entry);
void hlsl_free_type(struct hlsl_type *type);
void hlsl_free_var(struct hlsl_ir_var *decl);

struct hlsl_ir_function *hlsl_get_function(struct hlsl_ctx *ctx, const char *name);
struct hlsl_ir_function_decl *hlsl_get_first_func_decl(struct hlsl_ctx *ctx, const char *name);

struct hlsl_ir_function_decl *hlsl_get_func_decl(struct hlsl_ctx *ctx, const char *name,
        const struct hlsl_func_parameters *parameters);
const struct hlsl_profile_info *hlsl_get_target_info(const char *target);
struct hlsl_type *hlsl_get_type(struct hlsl_scope *scope, const char *name, bool recursive, bool case_insensitive);
struct hlsl_ir_var *hlsl_get_var(struct hlsl_scope *scope, const char *name);

struct hlsl_type *hlsl_get_element_type_from_path_index(struct hlsl_ctx *ctx, const struct hlsl_type *type,
        struct hlsl_ir_node *idx);

const char *hlsl_jump_type_to_string(enum hlsl_ir_jump_type type);

struct hlsl_type *hlsl_new_array_type(struct hlsl_ctx *ctx, struct hlsl_type *basic_type, unsigned int array_size);
struct hlsl_ir_node *hlsl_new_binary_expr(struct hlsl_ctx *ctx, enum hlsl_ir_expr_op op, struct hlsl_ir_node *arg1,
        struct hlsl_ir_node *arg2);
struct hlsl_ir_node *hlsl_new_bool_constant(struct hlsl_ctx *ctx, bool b, const struct vkd3d_shader_location *loc);
struct hlsl_buffer *hlsl_new_buffer(struct hlsl_ctx *ctx, enum hlsl_buffer_type type, const char *name,
        uint32_t modifiers, const struct hlsl_reg_reservation *reservation, struct hlsl_scope *annotations,
        const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_call(struct hlsl_ctx *ctx, struct hlsl_ir_function_decl *decl,
        const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_cast(struct hlsl_ctx *ctx, struct hlsl_ir_node *node, struct hlsl_type *type,
        const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_constant(struct hlsl_ctx *ctx, struct hlsl_type *type,
        const struct hlsl_constant_value *value, const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_copy(struct hlsl_ctx *ctx, struct hlsl_ir_node *node);
struct hlsl_ir_node *hlsl_new_expr(struct hlsl_ctx *ctx, enum hlsl_ir_expr_op op,
        struct hlsl_ir_node *operands[HLSL_MAX_OPERANDS],
        struct hlsl_type *data_type, const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_float_constant(struct hlsl_ctx *ctx,
        float f, const struct vkd3d_shader_location *loc);
struct hlsl_ir_function_decl *hlsl_new_func_decl(struct hlsl_ctx *ctx,
        struct hlsl_type *return_type, const struct hlsl_func_parameters *parameters,
        const struct hlsl_semantic *semantic, const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_if(struct hlsl_ctx *ctx, struct hlsl_ir_node *condition,
        struct hlsl_block *then_block, struct hlsl_block *else_block, const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_int_constant(struct hlsl_ctx *ctx, int32_t n, const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_jump(struct hlsl_ctx *ctx,
        enum hlsl_ir_jump_type type, struct hlsl_ir_node *condition, const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_ternary_expr(struct hlsl_ctx *ctx, enum hlsl_ir_expr_op op,
        struct hlsl_ir_node *arg1, struct hlsl_ir_node *arg2, struct hlsl_ir_node *arg3);

void hlsl_init_simple_deref_from_var(struct hlsl_deref *deref, struct hlsl_ir_var *var);

struct hlsl_ir_load *hlsl_new_var_load(struct hlsl_ctx *ctx, struct hlsl_ir_var *var,
        const struct vkd3d_shader_location *loc);
struct hlsl_ir_load *hlsl_new_load_index(struct hlsl_ctx *ctx, const struct hlsl_deref *deref,
        struct hlsl_ir_node *idx, const struct vkd3d_shader_location *loc);
struct hlsl_ir_load *hlsl_new_load_parent(struct hlsl_ctx *ctx, const struct hlsl_deref *deref,
        const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_load_component(struct hlsl_ctx *ctx, struct hlsl_block *block,
        const struct hlsl_deref *deref, unsigned int comp, const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_add_load_component(struct hlsl_ctx *ctx, struct hlsl_block *block,
        struct hlsl_ir_node *var_instr, unsigned int comp, const struct vkd3d_shader_location *loc);

struct hlsl_ir_node *hlsl_new_simple_store(struct hlsl_ctx *ctx, struct hlsl_ir_var *lhs, struct hlsl_ir_node *rhs);
struct hlsl_ir_node *hlsl_new_store_index(struct hlsl_ctx *ctx, const struct hlsl_deref *lhs,
        struct hlsl_ir_node *idx, struct hlsl_ir_node *rhs, unsigned int writemask, const struct vkd3d_shader_location *loc);
bool hlsl_new_store_component(struct hlsl_ctx *ctx, struct hlsl_block *block,
        const struct hlsl_deref *lhs, unsigned int comp, struct hlsl_ir_node *rhs);

bool hlsl_index_is_noncontiguous(struct hlsl_ir_index *index);
bool hlsl_index_is_resource_access(struct hlsl_ir_index *index);
bool hlsl_index_chain_has_resource_access(struct hlsl_ir_index *index);

struct hlsl_ir_node *hlsl_new_index(struct hlsl_ctx *ctx, struct hlsl_ir_node *val,
        struct hlsl_ir_node *idx, const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_loop(struct hlsl_ctx *ctx, struct hlsl_block *block,
        enum hlsl_ir_loop_unroll_type unroll_type, unsigned int unroll_limit,
        const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_resource_load(struct hlsl_ctx *ctx,
        const struct hlsl_resource_load_params *params, const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_resource_store(struct hlsl_ctx *ctx, const struct hlsl_deref *resource,
        struct hlsl_ir_node *coords, struct hlsl_ir_node *value, const struct vkd3d_shader_location *loc);
struct hlsl_type *hlsl_new_struct_type(struct hlsl_ctx *ctx, const char *name,
        struct hlsl_struct_field *fields, size_t field_count);
struct hlsl_ir_node *hlsl_new_swizzle(struct hlsl_ctx *ctx, uint32_t s, unsigned int components,
        struct hlsl_ir_node *val, const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_stateblock_constant(struct hlsl_ctx *ctx, const char *name,
        struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_string_constant(struct hlsl_ctx *ctx, const char *str,
        const struct vkd3d_shader_location *loc);
struct hlsl_ir_var *hlsl_new_synthetic_var(struct hlsl_ctx *ctx, const char *template,
        struct hlsl_type *type, const struct vkd3d_shader_location *loc);
struct hlsl_ir_var *hlsl_new_synthetic_var_named(struct hlsl_ctx *ctx, const char *name,
        struct hlsl_type *type, const struct vkd3d_shader_location *loc, bool dummy_scope);
struct hlsl_type *hlsl_new_texture_type(struct hlsl_ctx *ctx, enum hlsl_sampler_dim dim, struct hlsl_type *format,
        unsigned int sample_count);
struct hlsl_type *hlsl_new_uav_type(struct hlsl_ctx *ctx, enum hlsl_sampler_dim dim,
        struct hlsl_type *format, bool rasteriser_ordered);
struct hlsl_type *hlsl_new_cb_type(struct hlsl_ctx *ctx, struct hlsl_type *format);
struct hlsl_ir_node *hlsl_new_uint_constant(struct hlsl_ctx *ctx, unsigned int n,
        const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_null_constant(struct hlsl_ctx *ctx, const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_unary_expr(struct hlsl_ctx *ctx, enum hlsl_ir_expr_op op, struct hlsl_ir_node *arg,
        const struct vkd3d_shader_location *loc);
struct hlsl_ir_var *hlsl_new_var(struct hlsl_ctx *ctx, const char *name, struct hlsl_type *type,
        const struct vkd3d_shader_location *loc, const struct hlsl_semantic *semantic, uint32_t modifiers,
        const struct hlsl_reg_reservation *reg_reservation);
struct hlsl_ir_switch_case *hlsl_new_switch_case(struct hlsl_ctx *ctx, unsigned int value, bool is_default,
        struct hlsl_block *body, const struct vkd3d_shader_location *loc);
struct hlsl_ir_node *hlsl_new_switch(struct hlsl_ctx *ctx, struct hlsl_ir_node *selector,
        struct list *cases, const struct vkd3d_shader_location *loc);

void hlsl_error(struct hlsl_ctx *ctx, const struct vkd3d_shader_location *loc,
        enum vkd3d_shader_error error, const char *fmt, ...) VKD3D_PRINTF_FUNC(4, 5);
void hlsl_fixme(struct hlsl_ctx *ctx, const struct vkd3d_shader_location *loc,
        const char *fmt, ...) VKD3D_PRINTF_FUNC(3, 4);
void hlsl_warning(struct hlsl_ctx *ctx, const struct vkd3d_shader_location *loc,
        enum vkd3d_shader_error error, const char *fmt, ...) VKD3D_PRINTF_FUNC(4, 5);
void hlsl_note(struct hlsl_ctx *ctx, const struct vkd3d_shader_location *loc,
        enum vkd3d_shader_log_level level, const char *fmt, ...) VKD3D_PRINTF_FUNC(4, 5);

void hlsl_push_scope(struct hlsl_ctx *ctx);
void hlsl_pop_scope(struct hlsl_ctx *ctx);

bool hlsl_scope_add_type(struct hlsl_scope *scope, struct hlsl_type *type);

struct hlsl_type *hlsl_type_clone(struct hlsl_ctx *ctx, struct hlsl_type *old,
        unsigned int default_majority, uint32_t modifiers);
unsigned int hlsl_type_component_count(const struct hlsl_type *type);
unsigned int hlsl_type_get_array_element_reg_size(const struct hlsl_type *type, enum hlsl_regset regset);
struct hlsl_type *hlsl_type_get_component_type(struct hlsl_ctx *ctx, struct hlsl_type *type,
        unsigned int index);
unsigned int hlsl_type_get_component_offset(struct hlsl_ctx *ctx, struct hlsl_type *type,
        unsigned int index, enum hlsl_regset *regset);
bool hlsl_type_is_row_major(const struct hlsl_type *type);
unsigned int hlsl_type_minor_size(const struct hlsl_type *type);
unsigned int hlsl_type_major_size(const struct hlsl_type *type);
unsigned int hlsl_type_element_count(const struct hlsl_type *type);
bool hlsl_type_is_resource(const struct hlsl_type *type);
unsigned int hlsl_type_get_sm4_offset(const struct hlsl_type *type, unsigned int offset);
bool hlsl_types_are_equal(const struct hlsl_type *t1, const struct hlsl_type *t2);

void hlsl_calculate_buffer_offsets(struct hlsl_ctx *ctx);

const struct hlsl_type *hlsl_get_multiarray_element_type(const struct hlsl_type *type);
unsigned int hlsl_get_multiarray_size(const struct hlsl_type *type);

uint32_t hlsl_combine_swizzles(uint32_t first, uint32_t second, unsigned int dim);
unsigned int hlsl_combine_writemasks(unsigned int first, unsigned int second);
uint32_t hlsl_map_swizzle(uint32_t swizzle, unsigned int writemask);
uint32_t hlsl_swizzle_from_writemask(unsigned int writemask);

struct hlsl_type *hlsl_deref_get_type(struct hlsl_ctx *ctx, const struct hlsl_deref *deref);
enum hlsl_regset hlsl_deref_get_regset(struct hlsl_ctx *ctx, const struct hlsl_deref *deref);
bool hlsl_component_index_range_from_deref(struct hlsl_ctx *ctx, const struct hlsl_deref *deref,
        unsigned int *start, unsigned int *count);
bool hlsl_regset_index_from_deref(struct hlsl_ctx *ctx, const struct hlsl_deref *deref,
        enum hlsl_regset regset, unsigned int *index);
bool hlsl_offset_from_deref(struct hlsl_ctx *ctx, const struct hlsl_deref *deref, unsigned int *offset);
unsigned int hlsl_offset_from_deref_safe(struct hlsl_ctx *ctx, const struct hlsl_deref *deref);
struct hlsl_reg hlsl_reg_from_deref(struct hlsl_ctx *ctx, const struct hlsl_deref *deref);

bool hlsl_copy_propagation_execute(struct hlsl_ctx *ctx, struct hlsl_block *block);
bool hlsl_fold_constant_exprs(struct hlsl_ctx *ctx, struct hlsl_ir_node *instr, void *context);
bool hlsl_fold_constant_identities(struct hlsl_ctx *ctx, struct hlsl_ir_node *instr, void *context);
bool hlsl_fold_constant_swizzles(struct hlsl_ctx *ctx, struct hlsl_ir_node *instr, void *context);
bool hlsl_transform_ir(struct hlsl_ctx *ctx, bool (*func)(struct hlsl_ctx *ctx, struct hlsl_ir_node *, void *),
        struct hlsl_block *block, void *context);

D3DXPARAMETER_CLASS hlsl_sm1_class(const struct hlsl_type *type);
D3DXPARAMETER_TYPE hlsl_sm1_base_type(const struct hlsl_type *type);
bool hlsl_sm1_register_from_semantic(const struct vkd3d_shader_version *version, const char *semantic_name,
        unsigned int semantic_index, bool output, enum vkd3d_shader_register_type *type, unsigned int *reg);
bool hlsl_sm1_usage_from_semantic(const char *semantic_name,
        uint32_t semantic_index, D3DDECLUSAGE *usage, uint32_t *usage_idx);

void write_sm1_uniforms(struct hlsl_ctx *ctx, struct vkd3d_bytecode_buffer *buffer);
int d3dbc_compile(struct vsir_program *program, uint64_t config_flags,
        const struct vkd3d_shader_compile_info *compile_info, const struct vkd3d_shader_code *ctab,
        struct vkd3d_shader_code *out, struct vkd3d_shader_message_context *message_context,
        struct hlsl_ctx *ctx, struct hlsl_ir_function_decl *entry_func);

bool hlsl_sm4_usage_from_semantic(struct hlsl_ctx *ctx,
        const struct hlsl_semantic *semantic, bool output, D3D_NAME *usage);
bool hlsl_sm4_register_from_semantic(struct hlsl_ctx *ctx, const struct hlsl_semantic *semantic,
        bool output, enum vkd3d_shader_register_type *type, bool *has_idx);
int hlsl_sm4_write(struct hlsl_ctx *ctx, struct hlsl_ir_function_decl *entry_func, struct vkd3d_shader_code *out);

struct hlsl_ir_function_decl *hlsl_compile_internal_function(struct hlsl_ctx *ctx, const char *name, const char *hlsl);

int hlsl_lexer_compile(struct hlsl_ctx *ctx, const struct vkd3d_shader_code *hlsl);

#endif