#if defined(__arm__) || defined(_ARCH_PPC) \
    || defined(__x86_64__) || defined(__i386__) \
    || defined(__sparc__) || defined(__aarch64__) \
+    || defined(__s390x__) \
    || defined(CONFIG_TCG_INTERPRETER)
#define USE_DIRECT_JUMP
#endif
    stl_le_p((void*)jmp_addr, addr - (jmp_addr + 4));
    /* no need to flush icache explicitly */
}
+#elif defined(__s390x__)
+static inline void tb_set_jmp_target1(uintptr_t jmp_addr, uintptr_t addr)
+{
+    /* Patch the branch destination: jmp_addr points at the 32-bit
+       displacement field, which starts 2 bytes into the 6-byte BRCL
+       instruction and is counted in halfwords relative to the start
+       of the instruction, hence (jmp_addr - 2) and the division by 2.  */
+    intptr_t disp = addr - (jmp_addr - 2);
+    stl_be_p((void*)jmp_addr, disp / 2);
+    /* no need to flush icache explicitly */
+}
#elif defined(__aarch64__)
void aarch64_tb_set_jmp_target(uintptr_t jmp_addr, uintptr_t addr);
#define tb_set_jmp_target1 aarch64_tb_set_jmp_target
    case INDEX_op_goto_tb:
        if (s->tb_jmp_offset) {
-            tcg_abort();
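+            /* Emit the opcode/mask half of BRCL (branch relative and
+               long, condition "always") and record the offset of its
+               32-bit displacement so tb_set_jmp_target1() can patch it
+               when the TB is chained.  */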
+            tcg_out16(s, RIL_BRCL | (S390_CC_ALWAYS << 4));
+            s->tb_jmp_offset[args[0]] = tcg_current_code_size(s);
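+            /* leave room for the 32-bit displacement (two 16-bit insn units) */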
+            s->code_ptr += 2;
        } else {
            /* load address stored at s->tb_next + args[0] */
            tcg_out_ld_abs(s, TCG_TYPE_PTR, TCG_TMP0, s->tb_next + args[0]);