switch ((insn >> 12) & 0xF) {
case 0x0:
/* Longword physical access (hw_ldl/p) */
- tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL);
+ tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL | MO_ALIGN);
break;
case 0x1:
/* Quadword physical access (hw_ldq/p) */
- tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEUQ);
+ tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEUQ | MO_ALIGN);
break;
case 0x2:
/* Longword physical access with lock (hw_ldl_l/p) */
- tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL);
+ tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LESL | MO_ALIGN);
tcg_gen_mov_i64(cpu_lock_addr, addr);
tcg_gen_mov_i64(cpu_lock_value, va);
break;
case 0x3:
/* Quadword physical access with lock (hw_ldq_l/p) */
- tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEUQ);
+ tcg_gen_qemu_ld_i64(va, addr, MMU_PHYS_IDX, MO_LEUQ | MO_ALIGN);
tcg_gen_mov_i64(cpu_lock_addr, addr);
tcg_gen_mov_i64(cpu_lock_value, va);
break;
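The two locked variants above also record a reservation in cpu_lock_addr/cpu_lock_value, which a later store-conditional checks. A minimal, self-contained sketch of that bookkeeping follows; load_locked, store_conditional, lock_addr and lock_value are hypothetical stand-ins, not QEMU's API (QEMU performs the actual check with an atomic cmpxchg against cpu_lock_value).

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for cpu_lock_addr/cpu_lock_value. */
    static uint64_t lock_addr = UINT64_MAX;   /* UINT64_MAX: no reservation */
    static uint64_t lock_value;

    /* Load-locked: load, then remember where and what we loaded. */
    static uint64_t load_locked(const uint64_t *mem, uint64_t idx)
    {
        uint64_t val = mem[idx];
        lock_addr = idx;
        lock_value = val;
        return val;
    }

    /* Store-conditional: succeed only if the reserved location still
     * holds the value seen at load-locked time. */
    static int store_conditional(uint64_t *mem, uint64_t idx, uint64_t val)
    {
        int ok = lock_addr == idx && mem[idx] == lock_value;
        if (ok) {
            mem[idx] = val;
        }
        lock_addr = UINT64_MAX;   /* reservation is consumed either way */
        return ok;
    }

    int main(void)
    {
        uint64_t mem[1] = { 42 };
        uint64_t v = load_locked(mem, 0);
        mem[0] = 7;               /* simulated interference by another CPU */
        printf("sc: %d\n", store_conditional(mem, 0, v + 1));  /* sc: 0 */
        return 0;
    }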
@@ ... @@
goto invalid_opc;
case 0xA:
/* Longword virtual access with protection check (hw_ldl/w) */
- tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LESL);
+ tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX,
+ MO_LESL | MO_ALIGN);
break;
case 0xB:
/* Quadword virtual access with protection check (hw_ldq/w) */
- tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LEUQ);
+ tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX,
+ MO_LEUQ | MO_ALIGN);
break;
case 0xC:
/* Longword virtual access with alt access mode (hw_ldl/a)*/
@@ ... @@
case 0xE:
/* Longword virtual access with alternate access mode and
protection checks (hw_ldl/wa) */
- tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LESL);
+ tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX,
+ MO_LESL | MO_ALIGN);
break;
case 0xF:
/* Quadword virtual access with alternate access mode and
protection checks (hw_ldq/wa) */
- tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LEUQ);
+ tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX,
+ MO_LEUQ | MO_ALIGN);
break;
}
break;
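Every change in this patch has the same shape: OR MO_ALIGN into the MemOp so each access enforces the Alpha's natural-alignment rule explicitly, rather than relying on a target-wide aligned-only default. Below is a rough sketch of the check this flag enables; the flag values are illustrative only, not QEMU's actual MemOp encoding.

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative MemOp-style flags; QEMU's real encoding differs. */
    enum {
        MO_32    = 2,        /* log2(size): 4-byte access */
        MO_64    = 3,        /* log2(size): 8-byte access */
        MO_ALIGN = 1 << 4,   /* require natural alignment */
    };

    /* With MO_ALIGN set, the address must be a multiple of the access
     * size; otherwise the emulated CPU takes an alignment fault. */
    static int alignment_fault(uint64_t addr, int memop)
    {
        uint64_t size = UINT64_C(1) << (memop & 3);
        return (memop & MO_ALIGN) && (addr & (size - 1));
    }

    int main(void)
    {
        printf("%d\n", alignment_fault(0x1000, MO_64 | MO_ALIGN)); /* 0 */
        printf("%d\n", alignment_fault(0x1004, MO_64 | MO_ALIGN)); /* 1 */
        printf("%d\n", alignment_fault(0x1004, MO_64));            /* 0 */
        return 0;
    }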
@@ ... @@
vb = load_gpr(ctx, rb);
tmp = tcg_temp_new();
tcg_gen_addi_i64(tmp, vb, disp12);
- tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LESL);
+ tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LESL | MO_ALIGN);
break;
case 0x1:
/* Quadword physical access */
@@ ... @@
vb = load_gpr(ctx, rb);
tmp = tcg_temp_new();
tcg_gen_addi_i64(tmp, vb, disp12);
- tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LEUQ);
+ tcg_gen_qemu_st_i64(va, tmp, MMU_PHYS_IDX, MO_LEUQ | MO_ALIGN);
break;
case 0x2:
/* Longword physical access with lock */
ret = gen_store_conditional(ctx, ra, rb, disp12,
- MMU_PHYS_IDX, MO_LESL);
+ MMU_PHYS_IDX, MO_LESL | MO_ALIGN);
break;
case 0x3:
/* Quadword physical access with lock */
ret = gen_store_conditional(ctx, ra, rb, disp12,
- MMU_PHYS_IDX, MO_LEUQ);
+ MMU_PHYS_IDX, MO_LEUQ | MO_ALIGN);
break;
case 0x4:
/* Longword virtual access */
@@ ... @@
break;
case 0x2A:
/* LDL_L */
- gen_load_int(ctx, ra, rb, disp16, MO_LESL, 0, 1);
+ gen_load_int(ctx, ra, rb, disp16, MO_LESL | MO_ALIGN, 0, 1);
break;
case 0x2B:
/* LDQ_L */
- gen_load_int(ctx, ra, rb, disp16, MO_LEUQ, 0, 1);
+ gen_load_int(ctx, ra, rb, disp16, MO_LEUQ | MO_ALIGN, 0, 1);
break;
case 0x2C:
/* STL */
@@ ... @@
case 0x2E:
/* STL_C */
ret = gen_store_conditional(ctx, ra, rb, disp16,
- ctx->mem_idx, MO_LESL);
+ ctx->mem_idx, MO_LESL | MO_ALIGN);
break;
case 0x2F:
/* STQ_C */
ret = gen_store_conditional(ctx, ra, rb, disp16,
- ctx->mem_idx, MO_LEUQ);
+ ctx->mem_idx, MO_LEUQ | MO_ALIGN);
break;
case 0x30:
/* BR */
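Taken together, the HW_LD subcode decoded in the first hunks selects a translation regime as well as the access width and lock behaviour. The condensed mapping below is a hypothetical restatement for orientation only; the subcodes absent from this excerpt (0x4-0x9 and 0xC-0xD) take the invalid_opc path in the full switch.

    #include <stdio.h>

    /* Hypothetical condensed view of the HW_LD decode above. */
    typedef enum { REGIME_PHYS, REGIME_KERNEL, REGIME_USER, REGIME_INVALID } Regime;

    static Regime hw_ld_regime(unsigned subcode)
    {
        switch (subcode & 0xF) {
        case 0x0: case 0x1:        /* hw_ldl/p, hw_ldq/p */
        case 0x2: case 0x3:        /* hw_ldl_l/p, hw_ldq_l/p (locked) */
            return REGIME_PHYS;
        case 0xA: case 0xB:        /* hw_ldl/w, hw_ldq/w */
            return REGIME_KERNEL;
        case 0xE: case 0xF:        /* hw_ldl/wa, hw_ldq/wa */
            return REGIME_USER;
        default:                   /* 0x4-0x9, 0xC-0xD: invalid_opc */
            return REGIME_INVALID;
        }
    }

    int main(void)
    {
        static const char *name[] = { "phys", "kernel", "user", "invalid" };
        for (unsigned s = 0; s < 16; s++) {
            printf("subcode 0x%X -> %s\n", s, name[hw_ld_regime(s)]);
        }
        return 0;
    }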