#define ICH_LR_EOI                     (1UL << 41)
 #define ICH_LR_GROUP                   (1UL << 60)
+/* LR is linked to a physical interrupt (GICv3 ICH_LR_EL2.HW bit) */
+#define ICH_LR_HW                      (1UL << 61)
 #define ICH_LR_STATE                   (3UL << 62)
 #define ICH_LR_PENDING_BIT             (1UL << 62)
 #define ICH_LR_ACTIVE_BIT              (1UL << 63)
+/* Physical INTID field of a HW-mapped LR; only 10 bits are used here */
+#define ICH_LR_PHYS_ID_SHIFT           32
+#define ICH_LR_PHYS_ID_MASK            (0x3ffUL << ICH_LR_PHYS_ID_SHIFT)
 
 #define ICH_MISR_EOI                   (1 << 0)
 #define ICH_MISR_U                     (1 << 1)
 
 
 #define GICH_LR_VIRTUALID              (0x3ff << 0)
 #define GICH_LR_PHYSID_CPUID_SHIFT     (10)
-#define GICH_LR_PHYSID_CPUID           (7 << GICH_LR_PHYSID_CPUID_SHIFT)
+/* Widened from 3 to 10 bits: covers the whole PhysID field, not just CPUID */
+#define GICH_LR_PHYSID_CPUID           (0x3ff << GICH_LR_PHYSID_CPUID_SHIFT)
 #define GICH_LR_STATE                  (3 << 28)
 #define GICH_LR_PENDING_BIT            (1 << 28)
 #define GICH_LR_ACTIVE_BIT             (1 << 29)
 #define GICH_LR_EOI                    (1 << 19)
+/* LR is linked to a physical interrupt (GICv2 GICH_LR.HW bit) */
+#define GICH_LR_HW                     (1 << 31)
 
 #define GICH_VMCR_CTRL_SHIFT           0
 #define GICH_VMCR_CTRL_MASK            (0x21f << GICH_VMCR_CTRL_SHIFT)
                lr_desc.state |= LR_STATE_ACTIVE;
        if (val & GICH_LR_EOI)
                lr_desc.state |= LR_EOI_INT;
+       if (val & GICH_LR_HW) {
+               lr_desc.state |= LR_HW;
+               lr_desc.hwirq = (val & GICH_LR_PHYSID_CPUID) >> GICH_LR_PHYSID_CPUID_SHIFT;
+       }
 
        return lr_desc;
 }
+/*
+ * Encode a struct vgic_lr into the hardware GICv2 list-register (GICH_LR)
+ * format and store it in the shadow copy for list register @lr.
+ */
 static void vgic_v2_set_lr(struct kvm_vcpu *vcpu, int lr,
                           struct vgic_lr lr_desc)
 {
-       u32 lr_val = (lr_desc.source << GICH_LR_PHYSID_CPUID_SHIFT) | lr_desc.irq;
+       u32 lr_val;
+
+       lr_val = lr_desc.irq;
 
        if (lr_desc.state & LR_STATE_PENDING)
                lr_val |= GICH_LR_PENDING_BIT;
        if (lr_desc.state & LR_EOI_INT)
                lr_val |= GICH_LR_EOI;
 
+       /*
+        * HW-mapped interrupt: put the physical INTID in the PhysID field.
+        * hwirq is not masked here, so a value wider than 10 bits would
+        * spill into the upper LR bits -- presumably the caller guarantees
+        * it fits (NOTE(review): confirm).
+        */
+       if (lr_desc.state & LR_HW) {
+               lr_val |= GICH_LR_HW;
+               lr_val |= (u32)lr_desc.hwirq << GICH_LR_PHYSID_CPUID_SHIFT;
+       }
+
+       /* The source-CPU field is only encoded for SGIs (irq < VGIC_NR_SGIS). */
+       if (lr_desc.irq < VGIC_NR_SGIS)
+               lr_val |= (lr_desc.source << GICH_LR_PHYSID_CPUID_SHIFT);
+
        vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = lr_val;
 }
 
 
                lr_desc.state |= LR_STATE_ACTIVE;
        if (val & ICH_LR_EOI)
                lr_desc.state |= LR_EOI_INT;
+       if (val & ICH_LR_HW) {
+               lr_desc.state |= LR_HW;
+               lr_desc.hwirq = (val >> ICH_LR_PHYS_ID_SHIFT) & GENMASK(9, 0);
+       }
 
        return lr_desc;
 }
         * Eventually we want to make this configurable, so we may revisit
         * this in the future.
         */
-       if (vcpu->kvm->arch.vgic.vgic_model == KVM_DEV_TYPE_ARM_VGIC_V3)
+       switch (vcpu->kvm->arch.vgic.vgic_model) {
+       case KVM_DEV_TYPE_ARM_VGIC_V3:
                lr_val |= ICH_LR_GROUP;
-       else
-               lr_val |= (u32)lr_desc.source << GICH_LR_PHYSID_CPUID_SHIFT;
+               break;
+       case  KVM_DEV_TYPE_ARM_VGIC_V2:
+               if (lr_desc.irq < VGIC_NR_SGIS)
+                       lr_val |= (u32)lr_desc.source << GICH_LR_PHYSID_CPUID_SHIFT;
+               break;
+       default:
+               BUG();
+       }
 
        if (lr_desc.state & LR_STATE_PENDING)
                lr_val |= ICH_LR_PENDING_BIT;
                lr_val |= ICH_LR_ACTIVE_BIT;
        if (lr_desc.state & LR_EOI_INT)
                lr_val |= ICH_LR_EOI;
+       if (lr_desc.state & LR_HW) {
+               lr_val |= ICH_LR_HW;
+               lr_val |= ((u64)lr_desc.hwirq) << ICH_LR_PHYS_ID_SHIFT;
+       }
 
        vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[LR_INDEX(lr)] = lr_val;
 }