#include "xe_reg_sr.h"
#include "xe_rtp.h"
-#undef XE_REG
-#undef XE_REG_MCR
-#define XE_REG(x, ...) _XE_RTP_REG(x)
-#define XE_REG_MCR(x, ...) _XE_RTP_MCR_REG(x)
-
#define REGULAR_REG1 XE_REG(1)
#define REGULAR_REG2 XE_REG(2)
#define REGULAR_REG3 XE_REG(3)
#define MCR_REG1 XE_REG_MCR(1)
#define MCR_REG2 XE_REG_MCR(2)
#define MCR_REG3 XE_REG_MCR(3)
+#define MASKED_REG1 XE_REG(1, XE_REG_OPTION_MASKED)
+
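+/*
+ * Redefine XE_REG_MCR() to encode the MCR bit directly in struct xe_reg
+ * (.mcr = 1) instead of returning the type-safe struct xe_reg_mcr wrapper:
+ * both the RTP action tables and xe_reg_sr entries store a plain struct xe_reg.
+ */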
+#undef XE_REG_MCR
+#define XE_REG_MCR(...) XE_REG(__VA_ARGS__, .mcr = 1)
struct rtp_test_case {
const char *name;
- struct {
- u32 offset;
- u32 type;
- } expected_reg;
- u32 expected_set_bits;
+ struct xe_reg expected_reg;
+ u32 expected_set_bits;
u32 expected_clr_bits;
unsigned long expected_count;
unsigned int expected_sr_errors;
static const struct rtp_test_case cases[] = {
{
.name = "coalesce-same-reg",
- .expected_reg = { REGULAR_REG1 },
+ .expected_reg = REGULAR_REG1,
.expected_set_bits = REG_BIT(0) | REG_BIT(1),
.expected_clr_bits = REG_BIT(0) | REG_BIT(1),
.expected_count = 1,
},
{
.name = "no-match-no-add",
- .expected_reg = { REGULAR_REG1 },
+ .expected_reg = REGULAR_REG1,
.expected_set_bits = REG_BIT(0),
.expected_clr_bits = REG_BIT(0),
.expected_count = 1,
},
{
.name = "no-match-no-add-multiple-rules",
- .expected_reg = { REGULAR_REG1 },
+ .expected_reg = REGULAR_REG1,
.expected_set_bits = REG_BIT(0),
.expected_clr_bits = REG_BIT(0),
.expected_count = 1,
},
{
.name = "two-regs-two-entries",
- .expected_reg = { REGULAR_REG1 },
+ .expected_reg = REGULAR_REG1,
.expected_set_bits = REG_BIT(0),
.expected_clr_bits = REG_BIT(0),
.expected_count = 2,
},
{
.name = "clr-one-set-other",
- .expected_reg = { REGULAR_REG1 },
+ .expected_reg = REGULAR_REG1,
.expected_set_bits = REG_BIT(0),
.expected_clr_bits = REG_BIT(1) | REG_BIT(0),
.expected_count = 1,
#define TEMP_MASK REG_GENMASK(10, 8)
#define TEMP_FIELD REG_FIELD_PREP(TEMP_MASK, 2)
.name = "set-field",
- .expected_reg = { REGULAR_REG1 },
+ .expected_reg = REGULAR_REG1,
.expected_set_bits = TEMP_FIELD,
.expected_clr_bits = TEMP_MASK,
.expected_count = 1,
},
{
.name = "conflict-duplicate",
- .expected_reg = { REGULAR_REG1 },
+ .expected_reg = REGULAR_REG1,
.expected_set_bits = REG_BIT(0),
.expected_clr_bits = REG_BIT(0),
.expected_count = 1,
},
{
.name = "conflict-not-disjoint",
- .expected_reg = { REGULAR_REG1 },
+ .expected_reg = REGULAR_REG1,
.expected_set_bits = REG_BIT(0),
.expected_clr_bits = REG_BIT(0),
.expected_count = 1,
},
{
.name = "conflict-reg-type",
- .expected_reg = { REGULAR_REG1 },
+ .expected_reg = REGULAR_REG1,
.expected_set_bits = REG_BIT(0),
.expected_clr_bits = REG_BIT(0),
.expected_count = 1,
/* drop: regular vs masked */
{ XE_RTP_NAME("basic-3"),
XE_RTP_RULES(FUNC(match_yes)),
- XE_RTP_ACTIONS(SET(REGULAR_REG1, REG_BIT(0),
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(MASKED_REG1, REG_BIT(0)))
},
{}
},
xe_rtp_process(param->entries, reg_sr, &xe->gt[0], NULL);
xa_for_each(&reg_sr->xa, idx, sre) {
- if (idx == param->expected_reg.offset)
+ if (idx == param->expected_reg.reg)
sr_entry = sre;
count++;
KUNIT_EXPECT_EQ(test, count, param->expected_count);
KUNIT_EXPECT_EQ(test, sr_entry->clr_bits, param->expected_clr_bits);
KUNIT_EXPECT_EQ(test, sr_entry->set_bits, param->expected_set_bits);
- KUNIT_EXPECT_EQ(test, sr_entry->reg_type, param->expected_reg.type);
+ KUNIT_EXPECT_EQ(test, sr_entry->reg.raw, param->expected_reg.raw);
KUNIT_EXPECT_EQ(test, reg_sr->errors, param->expected_sr_errors);
}
BUILD_BUG_ON(ARRAY_SIZE(extra_regs) > ADS_REGSET_EXTRA_MAX);
xa_for_each(&hwe->reg_sr.xa, idx, entry) {
- u32 flags = entry->masked_reg ? GUC_REGSET_MASKED : 0;
+ u32 flags = entry->reg.masked ? GUC_REGSET_MASKED : 0;
guc_mmio_regset_write_one(ads, regset_map, idx, flags, count++);
}
e1->clr_bits & e2->set_bits || e1->set_bits & e2->clr_bits)
return false;
- if (e1->masked_reg != e2->masked_reg)
- return false;
-
- if (e1->reg_type != e2->reg_type)
+ if (e1->reg.raw != e2->reg.raw)
return false;
return true;
#endif
}
-int xe_reg_sr_add(struct xe_reg_sr *sr, u32 reg,
+int xe_reg_sr_add(struct xe_reg_sr *sr,
const struct xe_reg_sr_entry *e)
{
- unsigned long idx = reg;
+ unsigned long idx = e->reg.reg;
struct xe_reg_sr_entry *pentry = xa_load(&sr->xa, idx);
int ret;
return 0;
fail:
- DRM_ERROR("Discarding save-restore reg %04lx (clear: %08x, set: %08x, masked: %s): ret=%d\n",
+ DRM_ERROR("Discarding save-restore reg %04lx (clear: %08x, set: %08x, masked: %s, mcr: %s): ret=%d\n",
idx, e->clr_bits, e->set_bits,
- str_yes_no(e->masked_reg), ret);
+ str_yes_no(e->reg.masked),
+ str_yes_no(e->reg.mcr),
+ ret);
reg_sr_inc_error(sr);
return ret;
}
-static void apply_one_mmio(struct xe_gt *gt, u32 reg,
- struct xe_reg_sr_entry *entry)
+/*
+ * Convert back from the encoded value to the type-safe MCR register. Only to
+ * be used when reg.mcr is true.
+ */
+static struct xe_reg_mcr to_xe_reg_mcr(const struct xe_reg reg)
+{
+ return (const struct xe_reg_mcr){.__reg.raw = reg.raw };
+}
+
+static void apply_one_mmio(struct xe_gt *gt, struct xe_reg_sr_entry *entry)
{
struct xe_device *xe = gt_to_xe(gt);
+ struct xe_reg reg = entry->reg;
+ struct xe_reg_mcr reg_mcr = to_xe_reg_mcr(reg);
u32 val;
/*
* When it's not masked, we have to read it from hardware, unless we are
* supposed to set all bits.
*/
- if (entry->masked_reg)
+ if (reg.masked)
val = (entry->clr_bits ?: entry->set_bits) << 16;
else if (entry->clr_bits + 1)
- val = (entry->reg_type == XE_RTP_REG_MCR ?
- xe_gt_mcr_unicast_read_any(gt, XE_REG_MCR(reg)) :
- xe_mmio_read32(gt, reg)) & (~entry->clr_bits);
+ val = (reg.mcr ?
+ xe_gt_mcr_unicast_read_any(gt, reg_mcr) :
+ xe_mmio_read32(gt, reg.reg)) & (~entry->clr_bits);
else
val = 0;
*/
val |= entry->set_bits;
- drm_dbg(&xe->drm, "REG[0x%x] = 0x%08x", reg, val);
+ drm_dbg(&xe->drm, "REG[0x%x] = 0x%08x", reg.reg, val);
- if (entry->reg_type == XE_RTP_REG_MCR)
- xe_gt_mcr_multicast_write(gt, XE_REG_MCR(reg), val);
+ if (entry->reg.mcr)
+ xe_gt_mcr_multicast_write(gt, reg_mcr, val);
else
- xe_mmio_write32(gt, reg, val);
+ xe_mmio_write32(gt, reg.reg, val);
}
void xe_reg_sr_apply_mmio(struct xe_reg_sr *sr, struct xe_gt *gt)
goto err_force_wake;
xa_for_each(&sr->xa, reg, entry)
- apply_one_mmio(gt, reg, entry);
+ apply_one_mmio(gt, entry);
err = xe_force_wake_put(&gt->mmio.fw, XE_FORCEWAKE_ALL);
XE_WARN_ON(err);
xa_for_each(&sr->xa, reg, entry)
drm_printf(p, "\tREG[0x%lx] clr=0x%08x set=0x%08x masked=%s mcr=%s\n",
reg, entry->clr_bits, entry->set_bits,
- str_yes_no(entry->masked_reg),
- str_yes_no(entry->reg_type == XE_RTP_REG_MCR));
+ str_yes_no(entry->reg.masked),
+ str_yes_no(entry->reg.mcr));
}
int xe_reg_sr_init(struct xe_reg_sr *sr, const char *name, struct xe_device *xe);
void xe_reg_sr_dump(struct xe_reg_sr *sr, struct drm_printer *p);
-int xe_reg_sr_add(struct xe_reg_sr *sr, u32 reg,
- const struct xe_reg_sr_entry *e);
+int xe_reg_sr_add(struct xe_reg_sr *sr, const struct xe_reg_sr_entry *e);
void xe_reg_sr_apply_mmio(struct xe_reg_sr *sr, struct xe_gt *gt);
void xe_reg_sr_apply_whitelist(struct xe_reg_sr *sr, u32 mmio_base,
struct xe_gt *gt);
#include <linux/types.h>
#include <linux/xarray.h>
+#include "regs/xe_reg_defs.h"
+
struct xe_reg_sr_entry {
+ struct xe_reg reg;
u32 clr_bits;
u32 set_bits;
/* Mask for bits to consider when reading value back */
u32 read_mask;
- /*
- * "Masked registers" are marked in spec as register with the upper 16
- * bits as a mask for the bits that is being updated on the lower 16
- * bits when writing to it.
- */
- u8 masked_reg;
- u8 reg_type;
};
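For reference (not part of this patch), struct xe_reg from "regs/xe_reg_defs.h" is assumed to be a u32-sized union along the lines of the sketch below, so that entries can be stored and compared through .raw. The field names mirror the accesses made in this patch (.reg, .masked, .mcr); the bitfield widths and the builder macros are illustrative only:

	#include <linux/types.h>	/* u32 */

	struct xe_reg {
		union {
			struct {
				u32 reg:22;	/* register offset, accessed as entry->reg.reg */
				u32 masked:1;	/* "masked" register: upper 16 bits select which low bits to write */
				u32 mcr:1;	/* multicast/replicated register, handled via xe_gt_mcr_*() */
			};
			u32 raw;		/* whole encoding, used for equality checks */
		};
	};

	/* Hypothetical builders matching how XE_REG()/XE_REG_OPTION_MASKED are used above */
	#define XE_REG(r_, ...)		((const struct xe_reg){ .reg = (r_), ##__VA_ARGS__ })
	#define XE_REG_OPTION_MASKED	.masked = 1

With such a layout, the conflict check in this patch reduces to comparing e1->reg.raw != e2->reg.raw, which covers offset, masked and mcr in one test.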
struct xe_reg_sr {
#include "xe_platform_types.h"
#include "xe_rtp.h"
-#undef XE_REG
#undef XE_REG_MCR
-#define XE_REG(x, ...) _XE_RTP_REG(x)
-#define XE_REG_MCR(x, ...) _XE_RTP_MCR_REG(x)
+#define XE_REG_MCR(...) XE_REG(__VA_ARGS__, .mcr = 1)
static bool match_not_render(const struct xe_gt *gt,
const struct xe_hw_engine *hwe)
u32 mmio_base,
struct xe_reg_sr *sr)
{
- u32 reg = action->reg + mmio_base;
struct xe_reg_sr_entry sr_entry = {
+ .reg = action->reg,
.clr_bits = action->clr_bits,
.set_bits = action->set_bits,
.read_mask = action->read_mask,
- .masked_reg = action->flags & XE_RTP_ACTION_FLAG_MASKED_REG,
- .reg_type = action->reg_type,
};
- xe_reg_sr_add(sr, reg, &sr_entry);
+ sr_entry.reg.reg += mmio_base;
+ xe_reg_sr_add(sr, &sr_entry);
}
static void rtp_process_one(const struct xe_rtp_entry *entry, struct xe_gt *gt,
/*
* Helper macros - not to be used outside this header.
*/
-/* This counts to 12. Any more, it will return 13th argument. */
-#define __COUNT_ARGS(_0, _1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _n, X...) _n
-#define COUNT_ARGS(X...) __COUNT_ARGS(, ##X, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0)
+#define _XE_ESC(...) __VA_ARGS__
+#define _XE_COUNT_ARGS(...) _XE_ESC(__XE_COUNT_ARGS(__VA_ARGS__,5,4,3,2,1,))
+#define __XE_COUNT_ARGS(_,_5,_4,_3,_2,X_,...) X_
-#define __CONCAT(a, b) a ## b
-#define CONCATENATE(a, b) __CONCAT(a, b)
+#define _XE_FIRST(...) _XE_ESC(__XE_FIRST(__VA_ARGS__,))
+#define __XE_FIRST(x_,...) x_
+#define _XE_TUPLE_TAIL(...) _XE_ESC(__XE_TUPLE_TAIL(__VA_ARGS__))
+#define __XE_TUPLE_TAIL(x_,...) (__VA_ARGS__)
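+/*
+ * Examples of how the helpers above expand:
+ *
+ *   _XE_COUNT_ARGS(a, b, c)   expands to 3
+ *   _XE_FIRST(a, b, c)        expands to a
+ *   _XE_TUPLE_TAIL(a, b, c)   expands to (b, c)
+ */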
-#define __CALL_FOR_EACH_1(MACRO_, x, ...) MACRO_(x)
-#define __CALL_FOR_EACH_2(MACRO_, x, ...) \
- MACRO_(x) __CALL_FOR_EACH_1(MACRO_, ##__VA_ARGS__)
-#define __CALL_FOR_EACH_3(MACRO_, x, ...) \
- MACRO_(x) __CALL_FOR_EACH_2(MACRO_, ##__VA_ARGS__)
-#define __CALL_FOR_EACH_4(MACRO_, x, ...) \
- MACRO_(x) __CALL_FOR_EACH_3(MACRO_, ##__VA_ARGS__)
+#define _XE_DROP_FIRST(x_, ...) __VA_ARGS__
-#define _CALL_FOR_EACH(NARGS_, MACRO_, x, ...) \
- CONCATENATE(__CALL_FOR_EACH_, NARGS_)(MACRO_, x, ##__VA_ARGS__)
-#define CALL_FOR_EACH(MACRO_, x, ...) \
- _CALL_FOR_EACH(COUNT_ARGS(x, ##__VA_ARGS__), MACRO_, x, ##__VA_ARGS__)
+#define _XE_RTP_CONCAT(a, b) __XE_RTP_CONCAT(a, b)
+#define __XE_RTP_CONCAT(a, b) XE_RTP_ ## a ## b
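+/* e.g. _XE_RTP_CONCAT(RULE_, PLATFORM) expands to XE_RTP_RULE_PLATFORM */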
-#define _XE_RTP_REG(x_) (x_), XE_RTP_REG_REGULAR
-#define _XE_RTP_MCR_REG(x_) (x_), XE_RTP_REG_MCR
+#define __XE_RTP_PASTE_SEP_COMMA ,
+#define __XE_RTP_PASTE_SEP_BITWISE_OR |
/*
- * Helper macros for concatenating prefix - do not use them directly outside
- * this header
+ * XE_RTP_PASTE_FOREACH - Paste XE_RTP_<@prefix_> on each element of the tuple
+ * @args_, with the end result separated by @sep_. @sep_ must be one of the
+ * previously declared macros __XE_RTP_PASTE_SEP_*, or declared with such a
+ * prefix.
+ *
+ * Examples:
+ *
+ * 1) XE_RTP_PASTE_FOREACH(TEST_, COMMA, (FOO, BAR))
+ * expands to:
+ *
+ * XE_RTP_TEST_FOO , XE_RTP_TEST_BAR
+ *
+ * 2) XE_RTP_PASTE_FOREACH(TEST2_, COMMA, (FOO))
+ * expands to:
+ *
+ * XE_RTP_TEST2_FOO
+ *
+ * 3) XE_RTP_PASTE_FOREACH(TEST3_, BITWISE_OR, (FOO, BAR))
+ * expands to:
+ *
+ * XE_RTP_TEST3_FOO | XE_RTP_TEST3_BAR
+ *
+ * 4) #define __XE_RTP_PASTE_SEP_MY_SEP BANANA
+ * XE_RTP_PASTE_FOREACH(TEST_, MY_SEP, (FOO, BAR))
+ * expands to:
+ *
+ * XE_RTP_TEST_FOO BANANA XE_RTP_TEST_BAR
*/
-#define __ADD_XE_RTP_ENTRY_FLAG_PREFIX(x) CONCATENATE(XE_RTP_ENTRY_FLAG_, x) |
-#define __ADD_XE_RTP_ACTION_FLAG_PREFIX(x) CONCATENATE(XE_RTP_ACTION_FLAG_, x) |
-#define __ADD_XE_RTP_RULE_PREFIX(x) CONCATENATE(XE_RTP_RULE_, x) ,
-#define __ADD_XE_RTP_ACTION_PREFIX(x) CONCATENATE(XE_RTP_ACTION_, x) ,
+#define XE_RTP_PASTE_FOREACH(prefix_, sep_, args_) _XE_ESC(_XE_RTP_CONCAT(PASTE_,_XE_COUNT_ARGS args_)(prefix_, sep_, args_))
+#define XE_RTP_PASTE_1(prefix_, sep_, args_) _XE_RTP_CONCAT(prefix_, _XE_FIRST args_)
+#define XE_RTP_PASTE_2(prefix_, sep_, args_) _XE_RTP_CONCAT(prefix_, _XE_FIRST args_) __XE_RTP_PASTE_SEP_ ## sep_ XE_RTP_PASTE_1(prefix_, sep_, _XE_TUPLE_TAIL args_)
+#define XE_RTP_PASTE_3(prefix_, sep_, args_) _XE_RTP_CONCAT(prefix_, _XE_FIRST args_) __XE_RTP_PASTE_SEP_ ## sep_ XE_RTP_PASTE_2(prefix_, sep_, _XE_TUPLE_TAIL args_)
+#define XE_RTP_PASTE_4(prefix_, sep_, args_) _XE_RTP_CONCAT(prefix_, _XE_FIRST args_) __XE_RTP_PASTE_SEP_ ## sep_ XE_RTP_PASTE_3(prefix_, sep_, _XE_TUPLE_TAIL args_)
+
+/*
+ * XE_RTP_DROP_CAST - Drop cast to convert a compound literal to an initializer
+ *
+ * Example:
+ *
+ * #define foo(a_) ((struct foo){ .a = a_ })
+ * XE_RTP_DROP_CAST(foo(10))
+ * expands to:
+ *
+ * { .a = 10 }
+ */
+#define XE_RTP_DROP_CAST(...) _XE_ESC(_XE_DROP_FIRST _XE_ESC __VA_ARGS__)
+
/*
* Macros to encode rules to match against platform, IP version, stepping, etc.
* Shouldn't be used directly - see XE_RTP_RULES()
*/
-
#define _XE_RTP_RULE_PLATFORM(plat__) \
{ .match_type = XE_RTP_MATCH_PLATFORM, .platform = plat__ }
* XE_RTP_ACTION_WR - Helper to write a value to the register, overriding all
* the bits
* @reg_: Register
- * @reg_type_: Register type - automatically expanded by XE_REG
* @val_: Value to set
* @...: Additional fields to override in the struct xe_rtp_action entry
*
*
* REGNAME = VALUE
*/
-#define XE_RTP_ACTION_WR(reg_, reg_type_, val_, ...) \
- { .reg = (reg_), .reg_type = (reg_type_), \
+#define XE_RTP_ACTION_WR(reg_, val_, ...) \
+ { .reg = XE_RTP_DROP_CAST(reg_), \
.clr_bits = ~0u, .set_bits = (val_), \
.read_mask = (~0u), ##__VA_ARGS__ }
/**
* XE_RTP_ACTION_SET - Set bits from @val_ in the register.
* @reg_: Register
- * @reg_type_: Register type - automatically expanded by XE_REG
* @val_: Bits to set in the register
* @...: Additional fields to override in the struct xe_rtp_action entry
*
* REGNAME[2] = 1
* REGNAME[5] = 1
*/
-#define XE_RTP_ACTION_SET(reg_, reg_type_, val_, ...) \
- { .reg = (reg_), .reg_type = (reg_type_), \
- .clr_bits = (val_), .set_bits = (val_), \
- .read_mask = (val_), ##__VA_ARGS__ }
+#define XE_RTP_ACTION_SET(reg_, val_, ...) \
+ { .reg = XE_RTP_DROP_CAST(reg_), \
+ .clr_bits = val_, .set_bits = val_, \
+ .read_mask = val_, ##__VA_ARGS__ }
/**
* XE_RTP_ACTION_CLR - Clear bits from @val_ in the register.
* @reg_: Register
- * @reg_type_: Register type - automatically expanded by XE_REG
* @val_: Bits to clear in the register
* @...: Additional fields to override in the struct xe_rtp_action entry
*
* REGNAME[2] = 0
* REGNAME[5] = 0
*/
-#define XE_RTP_ACTION_CLR(reg_, reg_type_, val_, ...) \
- { .reg = (reg_), .reg_type = (reg_type_), \
- .clr_bits = (val_), .set_bits = 0, \
- .read_mask = (val_), ##__VA_ARGS__ }
+#define XE_RTP_ACTION_CLR(reg_, val_, ...) \
+ { .reg = XE_RTP_DROP_CAST(reg_), \
+ .clr_bits = val_, .set_bits = 0, \
+ .read_mask = val_, ##__VA_ARGS__ }
/**
* XE_RTP_ACTION_FIELD_SET - Set a bit range
* @reg_: Register
- * @reg_type_: Register type - automatically expanded by XE_REG
* @mask_bits_: Mask of bits to be changed in the register, forming a field
* @val_: Value to set in the field denoted by @mask_bits_
* @...: Additional fields to override in the struct xe_rtp_action entry
*
* REGNAME[<end>:<start>] = VALUE
*/
-#define XE_RTP_ACTION_FIELD_SET(reg_, reg_type_, mask_bits_, val_, ...) \
- { .reg = (reg_), .reg_type = (reg_type_), \
- .clr_bits = (mask_bits_), .set_bits = (val_), \
- .read_mask = (mask_bits_), ##__VA_ARGS__ }
+#define XE_RTP_ACTION_FIELD_SET(reg_, mask_bits_, val_, ...) \
+ { .reg = XE_RTP_DROP_CAST(reg_), \
+ .clr_bits = mask_bits_, .set_bits = val_, \
+ .read_mask = mask_bits_, ##__VA_ARGS__ }
-#define XE_RTP_ACTION_FIELD_SET_NO_READ_MASK(reg_, reg_type_, mask_bits_, val_, ...) \
- { .reg = (reg_), .reg_type = (reg_type_), \
+#define XE_RTP_ACTION_FIELD_SET_NO_READ_MASK(reg_, mask_bits_, val_, ...) \
+ { .reg = XE_RTP_DROP_CAST(reg_), \
.clr_bits = (mask_bits_), .set_bits = (val_), \
.read_mask = 0, ##__VA_ARGS__ }
/**
* XE_RTP_ACTION_WHITELIST - Add register to userspace whitelist
* @reg_: Register
- * @reg_type_: Register type - automatically expanded by XE_REG
* @val_: Whitelist-specific flags to set
* @...: Additional fields to override in the struct xe_rtp_action entry
*
* Add a register to the whitelist, allowing userspace to modify the register with
* regular user privileges.
*/
-#define XE_RTP_ACTION_WHITELIST(reg_, reg_type_, val_, ...) \
+#define XE_RTP_ACTION_WHITELIST(reg_, val_, ...) \
/* TODO fail build if ((flags) & ~(RING_FORCE_TO_NONPRIV_MASK_VALID)) */\
- { .reg = (reg_), .reg_type = (reg_type_), .set_bits = (val_), \
+ { .reg = XE_RTP_DROP_CAST(reg_), \
+ .set_bits = val_, \
.clr_bits = RING_FORCE_TO_NONPRIV_MASK_VALID, \
##__VA_ARGS__ }
/**
* XE_RTP_ENTRY_FLAG - Helper to add multiple flags to a struct xe_rtp_entry
- * @f1_: Last part of a ``XE_RTP_ENTRY_FLAG_*``
- * @...: Additional flags, defined like @f1_
+ * @...: Entry flags, without the ``XE_RTP_ENTRY_FLAG_`` prefix
*
- * Helper to automatically add a ``XE_RTP_ENTRY_FLAG_`` prefix to @f1_ so it can
- * be easily used to define struct xe_rtp_action entries. Example:
+ * Helper to automatically add a ``XE_RTP_ENTRY_FLAG_`` prefix to the flags
+ * when defining struct xe_rtp_entry entries. Example:
*
* .. code-block:: c
*
* ...
* };
*/
-#define XE_RTP_ENTRY_FLAG(f1_, ...) \
- .flags = (CALL_FOR_EACH(__ADD_XE_RTP_ENTRY_FLAG_PREFIX, f1_, ##__VA_ARGS__) 0)
+#define XE_RTP_ENTRY_FLAG(...) \
+ .flags = (XE_RTP_PASTE_FOREACH(ENTRY_FLAG_, BITWISE_OR, (__VA_ARGS__)))
/**
* XE_RTP_ACTION_FLAG - Helper to add multiple flags to a struct xe_rtp_action
- * @f1_: Last part of a ``XE_RTP_ENTRY_*``
- * @...: Additional flags, defined like @f1_
+ * @...: Action flags, without the ``XE_RTP_ACTION_FLAG_`` prefix
*
- * Helper to automatically add a ``XE_RTP_ACTION_FLAG_`` prefix to @f1_ so it
- * can be easily used to define struct xe_rtp_action entries. Example:
+ * Helper to automatically add a ``XE_RTP_ACTION_FLAG_`` prefix to the flags
+ * when defining struct xe_rtp_action entries. Example:
*
* .. code-block:: c
*
* ...
* };
*/
-#define XE_RTP_ACTION_FLAG(f1_, ...) \
- .flags = (CALL_FOR_EACH(__ADD_XE_RTP_ACTION_FLAG_PREFIX, f1_, ##__VA_ARGS__) 0)
+#define XE_RTP_ACTION_FLAG(...) \
+ .flags = (XE_RTP_PASTE_FOREACH(ACTION_FLAG_, BITWISE_OR, (__VA_ARGS__)))
/**
* XE_RTP_RULES - Helper to set multiple rules to a struct xe_rtp_entry entry
- * @r1: Last part of XE_RTP_MATCH_*
- * @...: Additional rules, defined like @r1
+ * @...: Rules, without the ``XE_RTP_RULE_`` prefix
*
* At least one rule is needed and up to 4 are supported. Multiple rules are
* AND'ed together, i.e. all the rules must evaluate to true for the entry to
* ...
* };
*/
-#define XE_RTP_RULES(r1, ...) \
- .n_rules = COUNT_ARGS(r1, ##__VA_ARGS__), \
+#define XE_RTP_RULES(...) \
+ .n_rules = _XE_COUNT_ARGS(__VA_ARGS__), \
.rules = (const struct xe_rtp_rule[]) { \
- CALL_FOR_EACH(__ADD_XE_RTP_RULE_PREFIX, r1, ##__VA_ARGS__) \
+ XE_RTP_PASTE_FOREACH(RULE_, COMMA, (__VA_ARGS__)) \
}
/**
* XE_RTP_ACTIONS - Helper to set multiple actions to a struct xe_rtp_entry
- * @a1: Action to take. Last part of XE_RTP_ACTION_*
- * @...: Additional rules, defined like @r1
+ * @...: Actions to be taken, without the ``XE_RTP_ACTION_`` prefix
*
* At least one action is needed and up to 4 are supported. Multiple rules are
* AND'ed together, i.e. all the rules must evaluate to true for the entry to
* ...
* };
*/
-#define XE_RTP_ACTIONS(a1, ...) \
- .n_actions = COUNT_ARGS(a1, ##__VA_ARGS__), \
+#define XE_RTP_ACTIONS(...) \
+ .n_actions = _XE_COUNT_ARGS(__VA_ARGS__), \
.actions = (const struct xe_rtp_action[]) { \
- CALL_FOR_EACH(__ADD_XE_RTP_ACTION_PREFIX, a1, ##__VA_ARGS__) \
+ XE_RTP_PASTE_FOREACH(ACTION_, COMMA, (__VA_ARGS__)) \
}
void xe_rtp_process(const struct xe_rtp_entry *entries, struct xe_reg_sr *sr,
#include <linux/types.h>
+#include "regs/xe_reg_defs.h"
+
struct xe_hw_engine;
struct xe_gt;
-enum {
- XE_RTP_REG_REGULAR,
- XE_RTP_REG_MCR,
-};
-
/**
* struct xe_rtp_action - action to take for any matching rule
*
*/
struct xe_rtp_action {
/** @reg: Register */
- u32 reg;
+ struct xe_reg reg;
/** @clr_bits: bits to clear when updating register */
- u32 clr_bits;
+ u32 clr_bits;
/** @set_bits: bits to set when updating register */
- u32 set_bits;
+ u32 set_bits;
#define XE_RTP_NOCHECK .read_mask = 0
/** @read_mask: mask for bits to consider when reading value back */
- u32 read_mask;
-#define XE_RTP_ACTION_FLAG_MASKED_REG BIT(0)
-#define XE_RTP_ACTION_FLAG_ENGINE_BASE BIT(1)
+ u32 read_mask;
+#define XE_RTP_ACTION_FLAG_ENGINE_BASE BIT(0)
/** @flags: flags to apply on rule evaluation or action */
- u8 flags;
- /** @reg_type: register type, see ``XE_RTP_REG_*`` */
- u8 reg_type;
+ u8 flags;
};
enum {
#include "xe_platform_types.h"
#include "xe_rtp.h"
-#undef XE_REG
#undef XE_REG_MCR
-#define XE_REG(x, ...) _XE_RTP_REG(x)
-#define XE_REG_MCR(x, ...) _XE_RTP_MCR_REG(x)
+#define XE_REG_MCR(...) XE_REG(__VA_ARGS__, .mcr = 1)
static const struct xe_rtp_entry gt_tunings[] = {
{ XE_RTP_NAME("Tuning: Blend Fill Caching Optimization Disable"),
},
{ XE_RTP_NAME("Tuning: TBIMR fast clip"),
XE_RTP_RULES(PLATFORM(DG2)),
- XE_RTP_ACTIONS(SET(CHICKEN_RASTER_2, TBIMR_FAST_CLIP,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(CHICKEN_RASTER_2, TBIMR_FAST_CLIP))
},
{}
};
* a more declarative approach rather than procedural.
*/
-#undef XE_REG
#undef XE_REG_MCR
-#define XE_REG(x, ...) _XE_RTP_REG(x)
-#define XE_REG_MCR(x, ...) _XE_RTP_MCR_REG(x)
+#define XE_REG_MCR(...) XE_REG(__VA_ARGS__, .mcr = 1)
__diag_push();
__diag_ignore_all("-Woverride-init", "Allow field overrides in table");
},
{ XE_RTP_NAME("16016694945"),
XE_RTP_RULES(PLATFORM(PVC)),
- XE_RTP_ACTIONS(SET(XEHPC_LNCFMISCCFGREG0, XEHPC_OVRLSCCC,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(XEHPC_LNCFMISCCFGREG0, XEHPC_OVRLSCCC))
},
{}
};
XE_RTP_RULES(GRAPHICS_VERSION(1200),
ENGINE_CLASS(RENDER),
IS_INTEGRATED),
- XE_RTP_ACTIONS(SET(ROW_CHICKEN2, PUSH_CONST_DEREF_HOLD_DIS,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(ROW_CHICKEN2, PUSH_CONST_DEREF_HOLD_DIS))
},
{ XE_RTP_NAME("14010229206, 1409085225"),
XE_RTP_RULES(GRAPHICS_VERSION(1200),
ENGINE_CLASS(RENDER),
IS_INTEGRATED),
- XE_RTP_ACTIONS(SET(ROW_CHICKEN4, DISABLE_TDL_PUSH,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(ROW_CHICKEN4, DISABLE_TDL_PUSH))
},
{ XE_RTP_NAME("1606931601"),
XE_RTP_RULES(GRAPHICS_VERSION_RANGE(1200, 1210), ENGINE_CLASS(RENDER)),
- XE_RTP_ACTIONS(SET(ROW_CHICKEN2, DISABLE_EARLY_READ,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(ROW_CHICKEN2, DISABLE_EARLY_READ))
},
{ XE_RTP_NAME("14010826681, 1606700617, 22010271021, 18019627453"),
XE_RTP_RULES(GRAPHICS_VERSION_RANGE(1200, 1255), ENGINE_CLASS(RENDER)),
- XE_RTP_ACTIONS(SET(CS_DEBUG_MODE1, FF_DOP_CLOCK_GATE_DISABLE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(CS_DEBUG_MODE1, FF_DOP_CLOCK_GATE_DISABLE))
},
{ XE_RTP_NAME("1406941453"),
XE_RTP_RULES(GRAPHICS_VERSION_RANGE(1200, 1210), ENGINE_CLASS(RENDER)),
- XE_RTP_ACTIONS(SET(SAMPLER_MODE, ENABLE_SMALLPL,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(SAMPLER_MODE, ENABLE_SMALLPL))
},
{ XE_RTP_NAME("FtrPerCtxtPreemptionGranularityControl"),
XE_RTP_RULES(GRAPHICS_VERSION_RANGE(1200, 1250), ENGINE_CLASS(RENDER)),
XE_RTP_ACTIONS(SET(FF_SLICE_CS_CHICKEN1,
- FFSC_PERCTX_PREEMPT_CTRL,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ FFSC_PERCTX_PREEMPT_CTRL))
},
/* TGL */
XE_RTP_RULES(PLATFORM(TIGERLAKE), ENGINE_CLASS(RENDER)),
XE_RTP_ACTIONS(SET(RING_PSMI_CTL(RENDER_RING_BASE),
WAIT_FOR_EVENT_POWER_DOWN_DISABLE |
- RC_SEMA_IDLE_MSG_DISABLE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ RC_SEMA_IDLE_MSG_DISABLE))
},
/* RKL */
XE_RTP_RULES(PLATFORM(ROCKETLAKE), ENGINE_CLASS(RENDER)),
XE_RTP_ACTIONS(SET(RING_PSMI_CTL(RENDER_RING_BASE),
WAIT_FOR_EVENT_POWER_DOWN_DISABLE |
- RC_SEMA_IDLE_MSG_DISABLE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ RC_SEMA_IDLE_MSG_DISABLE))
},
/* ADL-P */
XE_RTP_RULES(PLATFORM(ALDERLAKE_P), ENGINE_CLASS(RENDER)),
XE_RTP_ACTIONS(SET(RING_PSMI_CTL(RENDER_RING_BASE),
WAIT_FOR_EVENT_POWER_DOWN_DISABLE |
- RC_SEMA_IDLE_MSG_DISABLE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ RC_SEMA_IDLE_MSG_DISABLE))
},
/* DG2 */
{ XE_RTP_NAME("18017747507"),
XE_RTP_RULES(PLATFORM(DG2), FUNC(xe_rtp_match_first_render_or_compute)),
XE_RTP_ACTIONS(SET(VFG_PREEMPTION_CHICKEN,
- POLYGON_TRIFAN_LINELOOP_DISABLE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ POLYGON_TRIFAN_LINELOOP_DISABLE))
},
{ XE_RTP_NAME("22012826095, 22013059131"),
XE_RTP_RULES(SUBPLATFORM(DG2, G10), STEP(B0, C0),
{ XE_RTP_NAME("14015227452"),
XE_RTP_RULES(PLATFORM(DG2),
FUNC(xe_rtp_match_first_render_or_compute)),
- XE_RTP_ACTIONS(SET(ROW_CHICKEN4, XEHP_DIS_BBL_SYSPIPE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(ROW_CHICKEN4, XEHP_DIS_BBL_SYSPIPE))
},
{ XE_RTP_NAME("16015675438"),
XE_RTP_RULES(PLATFORM(DG2),
FUNC(xe_rtp_match_first_render_or_compute)),
XE_RTP_ACTIONS(SET(FF_SLICE_CS_CHICKEN2,
- PERF_FIX_BALANCING_CFE_DISABLE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ PERF_FIX_BALANCING_CFE_DISABLE))
},
{ XE_RTP_NAME("16011620976, 22015475538"),
XE_RTP_RULES(PLATFORM(DG2),
XE_RTP_RULES(SUBPLATFORM(DG2, G10), STEP(A0, C0),
FUNC(xe_rtp_match_first_render_or_compute)),
XE_RTP_ACTIONS(SET(CACHE_MODE_SS, ENABLE_PREFETCH_INTO_IC,
- XE_RTP_ACTION_FLAG(MASKED_REG),
/*
* Register can't be read back for verification on
* DG2 due to Wa_14012342262
XE_RTP_RULES(SUBPLATFORM(DG2, G11),
FUNC(xe_rtp_match_first_render_or_compute)),
XE_RTP_ACTIONS(SET(CACHE_MODE_SS, ENABLE_PREFETCH_INTO_IC,
- XE_RTP_ACTION_FLAG(MASKED_REG),
/*
* Register can't be read back for verification on
* DG2 due to Wa_14012342262
},
{ XE_RTP_NAME("1509727124"),
XE_RTP_RULES(PLATFORM(DG2), ENGINE_CLASS(RENDER)),
- XE_RTP_ACTIONS(SET(SAMPLER_MODE, SC_DISABLE_POWER_OPTIMIZATION_EBB,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(SAMPLER_MODE, SC_DISABLE_POWER_OPTIMIZATION_EBB))
},
{ XE_RTP_NAME("22012856258"),
XE_RTP_RULES(PLATFORM(DG2), ENGINE_CLASS(RENDER)),
- XE_RTP_ACTIONS(SET(ROW_CHICKEN2, DISABLE_READ_SUPPRESSION,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(ROW_CHICKEN2, DISABLE_READ_SUPPRESSION))
},
{ XE_RTP_NAME("14013392000"),
XE_RTP_RULES(SUBPLATFORM(DG2, G11), STEP(A0, B0), ENGINE_CLASS(RENDER)),
- XE_RTP_ACTIONS(SET(ROW_CHICKEN2, ENABLE_LARGE_GRF_MODE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(ROW_CHICKEN2, ENABLE_LARGE_GRF_MODE))
},
{ XE_RTP_NAME("14012419201"),
XE_RTP_RULES(SUBPLATFORM(DG2, G10), STEP(A0, B0), ENGINE_CLASS(RENDER)),
XE_RTP_ACTIONS(SET(ROW_CHICKEN4,
- DISABLE_HDR_PAST_PAYLOAD_HOLD_FIX,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ DISABLE_HDR_PAST_PAYLOAD_HOLD_FIX))
},
{ XE_RTP_NAME("14012419201"),
XE_RTP_RULES(SUBPLATFORM(DG2, G11), STEP(A0, B0), ENGINE_CLASS(RENDER)),
XE_RTP_ACTIONS(SET(ROW_CHICKEN4,
- DISABLE_HDR_PAST_PAYLOAD_HOLD_FIX,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ DISABLE_HDR_PAST_PAYLOAD_HOLD_FIX))
},
{ XE_RTP_NAME("1308578152"),
XE_RTP_RULES(SUBPLATFORM(DG2, G10), STEP(B0, C0), ENGINE_CLASS(RENDER),
FUNC(xe_rtp_match_first_gslice_fused_off)),
XE_RTP_ACTIONS(CLR(CS_DEBUG_MODE1,
- REPLAY_MODE_GRANULARITY,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ REPLAY_MODE_GRANULARITY))
},
{ XE_RTP_NAME("22010960976, 14013347512"),
XE_RTP_RULES(PLATFORM(DG2), ENGINE_CLASS(RENDER)),
XE_RTP_ACTIONS(CLR(XEHP_HDC_CHICKEN0,
- LSC_L1_FLUSH_CTL_3D_DATAPORT_FLUSH_EVENTS_MASK,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ LSC_L1_FLUSH_CTL_3D_DATAPORT_FLUSH_EVENTS_MASK))
},
{ XE_RTP_NAME("1608949956, 14010198302"),
XE_RTP_RULES(PLATFORM(DG2), ENGINE_CLASS(RENDER)),
XE_RTP_ACTIONS(SET(ROW_CHICKEN,
- MDQ_ARBITRATION_MODE | UGM_BACKUP_MODE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ MDQ_ARBITRATION_MODE | UGM_BACKUP_MODE))
},
{ XE_RTP_NAME("22010430635"),
XE_RTP_RULES(SUBPLATFORM(DG2, G10), STEP(A0, B0), ENGINE_CLASS(RENDER)),
XE_RTP_ACTIONS(SET(ROW_CHICKEN4,
- DISABLE_GRF_CLEAR,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ DISABLE_GRF_CLEAR))
},
{ XE_RTP_NAME("14013202645"),
XE_RTP_RULES(SUBPLATFORM(DG2, G10), STEP(B0, C0), ENGINE_CLASS(RENDER)),
{ XE_RTP_NAME("22012532006"),
XE_RTP_RULES(SUBPLATFORM(DG2, G10), STEP(A0, C0), ENGINE_CLASS(RENDER)),
XE_RTP_ACTIONS(SET(HALF_SLICE_CHICKEN7,
- DG2_DISABLE_ROUND_ENABLE_ALLOW_FOR_SSLA,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ DG2_DISABLE_ROUND_ENABLE_ALLOW_FOR_SSLA))
},
{ XE_RTP_NAME("22012532006"),
XE_RTP_RULES(SUBPLATFORM(DG2, G11), STEP(A0, B0), ENGINE_CLASS(RENDER)),
XE_RTP_ACTIONS(SET(HALF_SLICE_CHICKEN7,
- DG2_DISABLE_ROUND_ENABLE_ALLOW_FOR_SSLA,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ DG2_DISABLE_ROUND_ENABLE_ALLOW_FOR_SSLA))
},
{ XE_RTP_NAME("22014600077"),
XE_RTP_RULES(SUBPLATFORM(DG2, G11), STEP(B0, FOREVER),
ENGINE_CLASS(RENDER)),
XE_RTP_ACTIONS(SET(CACHE_MODE_SS,
ENABLE_EU_COUNT_FOR_TDL_FLUSH,
- XE_RTP_ACTION_FLAG(MASKED_REG),
/*
* Wa_14012342262 write-only reg, so skip
* verification
XE_RTP_RULES(SUBPLATFORM(DG2, G10), ENGINE_CLASS(RENDER)),
XE_RTP_ACTIONS(SET(CACHE_MODE_SS,
ENABLE_EU_COUNT_FOR_TDL_FLUSH,
- XE_RTP_ACTION_FLAG(MASKED_REG),
/*
* Wa_14012342262 write-only reg, so skip
* verification
},
{ XE_RTP_NAME("14015227452"),
XE_RTP_RULES(PLATFORM(PVC), FUNC(xe_rtp_match_first_render_or_compute)),
- XE_RTP_ACTIONS(SET(ROW_CHICKEN4, XEHP_DIS_BBL_SYSPIPE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(ROW_CHICKEN4, XEHP_DIS_BBL_SYSPIPE))
},
{ XE_RTP_NAME("16015675438"),
XE_RTP_RULES(PLATFORM(PVC), FUNC(xe_rtp_match_first_render_or_compute)),
- XE_RTP_ACTIONS(SET(FF_SLICE_CS_CHICKEN2, PERF_FIX_BALANCING_CFE_DISABLE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(FF_SLICE_CS_CHICKEN2, PERF_FIX_BALANCING_CFE_DISABLE))
},
{ XE_RTP_NAME("14014999345"),
XE_RTP_RULES(PLATFORM(PVC), ENGINE_CLASS(COMPUTE), STEP(B0, C0)),
- XE_RTP_ACTIONS(SET(CACHE_MODE_SS, DISABLE_ECC,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(CACHE_MODE_SS, DISABLE_ECC))
},
{}
};
{ XE_RTP_NAME("1409342910, 14010698770, 14010443199, 1408979724, 1409178076, 1409207793, 1409217633, 1409252684, 1409347922, 1409142259"),
XE_RTP_RULES(GRAPHICS_VERSION_RANGE(1200, 1210)),
XE_RTP_ACTIONS(SET(COMMON_SLICE_CHICKEN3,
- DISABLE_CPS_AWARE_COLOR_PIPE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ DISABLE_CPS_AWARE_COLOR_PIPE))
},
{ XE_RTP_NAME("WaDisableGPGPUMidThreadPreemption"),
XE_RTP_RULES(GRAPHICS_VERSION_RANGE(1200, 1210)),
XE_RTP_ACTIONS(FIELD_SET(CS_CHICKEN1,
PREEMPT_GPGPU_LEVEL_MASK,
- PREEMPT_GPGPU_THREAD_GROUP_LEVEL,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ PREEMPT_GPGPU_THREAD_GROUP_LEVEL))
},
{ XE_RTP_NAME("1806527549"),
XE_RTP_RULES(GRAPHICS_VERSION(1200)),
- XE_RTP_ACTIONS(SET(HIZ_CHICKEN, HZ_DEPTH_TEST_LE_GE_OPT_DISABLE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(HIZ_CHICKEN, HZ_DEPTH_TEST_LE_GE_OPT_DISABLE))
},
{ XE_RTP_NAME("1606376872"),
XE_RTP_RULES(GRAPHICS_VERSION(1200)),
- XE_RTP_ACTIONS(SET(COMMON_SLICE_CHICKEN4, DISABLE_TDC_LOAD_BALANCING_CALC,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(COMMON_SLICE_CHICKEN4, DISABLE_TDC_LOAD_BALANCING_CALC))
},
/* DG1 */
{ XE_RTP_NAME("1409044764"),
XE_RTP_RULES(PLATFORM(DG1)),
XE_RTP_ACTIONS(CLR(COMMON_SLICE_CHICKEN3,
- DG1_FLOAT_POINT_BLEND_OPT_STRICT_MODE_EN,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ DG1_FLOAT_POINT_BLEND_OPT_STRICT_MODE_EN))
},
{ XE_RTP_NAME("22010493298"),
XE_RTP_RULES(PLATFORM(DG1)),
XE_RTP_ACTIONS(SET(HIZ_CHICKEN,
- DG1_HZ_READ_SUPPRESSION_OPTIMIZATION_DISABLE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ DG1_HZ_READ_SUPPRESSION_OPTIMIZATION_DISABLE))
},
/* DG2 */
{ XE_RTP_NAME("16011186671"),
XE_RTP_RULES(SUBPLATFORM(DG2, G11), STEP(A0, B0)),
- XE_RTP_ACTIONS(CLR(VFLSKPD, DIS_MULT_MISS_RD_SQUASH,
- .flags = XE_RTP_ACTION_FLAG_MASKED_REG),
- SET(VFLSKPD, DIS_OVER_FETCH_CACHE,
- .flags = XE_RTP_ACTION_FLAG_MASKED_REG))
+ XE_RTP_ACTIONS(CLR(VFLSKPD, DIS_MULT_MISS_RD_SQUASH),
+ SET(VFLSKPD, DIS_OVER_FETCH_CACHE))
},
{ XE_RTP_NAME("14010469329"),
XE_RTP_RULES(SUBPLATFORM(DG2, G10), STEP(A0, B0)),
XE_RTP_ACTIONS(SET(XEHP_COMMON_SLICE_CHICKEN3,
- XEHP_DUAL_SIMD8_SEQ_MERGE_DISABLE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XEHP_DUAL_SIMD8_SEQ_MERGE_DISABLE))
},
{ XE_RTP_NAME("14010698770, 22010613112, 22010465075"),
XE_RTP_RULES(SUBPLATFORM(DG2, G10), STEP(A0, B0)),
XE_RTP_ACTIONS(SET(XEHP_COMMON_SLICE_CHICKEN3,
- DISABLE_CPS_AWARE_COLOR_PIPE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ DISABLE_CPS_AWARE_COLOR_PIPE))
},
{ XE_RTP_NAME("16013271637"),
XE_RTP_RULES(PLATFORM(DG2)),
XE_RTP_ACTIONS(SET(XEHP_SLICE_COMMON_ECO_CHICKEN1,
- MSC_MSAA_REODER_BUF_BYPASS_DISABLE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ MSC_MSAA_REODER_BUF_BYPASS_DISABLE))
},
{ XE_RTP_NAME("14014947963"),
XE_RTP_RULES(PLATFORM(DG2)),
XE_RTP_ACTIONS(FIELD_SET(VF_PREEMPTION,
PREEMPTION_VERTEX_COUNT,
- 0x4000,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ 0x4000))
},
{ XE_RTP_NAME("18018764978"),
XE_RTP_RULES(PLATFORM(DG2)),
XE_RTP_ACTIONS(SET(XEHP_PSS_MODE2,
- SCOREBOARD_STALL_FLUSH_CONTROL,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ SCOREBOARD_STALL_FLUSH_CONTROL))
},
{ XE_RTP_NAME("15010599737"),
XE_RTP_RULES(PLATFORM(DG2)),
- XE_RTP_ACTIONS(SET(CHICKEN_RASTER_1, DIS_SF_ROUND_NEAREST_EVEN,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(CHICKEN_RASTER_1, DIS_SF_ROUND_NEAREST_EVEN))
},
{ XE_RTP_NAME("18019271663"),
XE_RTP_RULES(PLATFORM(DG2)),
- XE_RTP_ACTIONS(SET(CACHE_MODE_1, MSAA_OPTIMIZATION_REDUC_DISABLE,
- XE_RTP_ACTION_FLAG(MASKED_REG)))
+ XE_RTP_ACTIONS(SET(CACHE_MODE_1, MSAA_OPTIMIZATION_REDUC_DISABLE))
},
{}
};