| X86_CR0_ET | X86_CR0_NE | X86_CR0_WP | X86_CR0_AM \
                          | X86_CR0_NW | X86_CR0_CD | X86_CR0_PG))
 
-#define CR3_PAE_RESERVED_BITS ((X86_CR3_PWT | X86_CR3_PCD) - 1)
-#define CR3_NONPAE_RESERVED_BITS ((PAGE_SIZE-1) & ~(X86_CR3_PWT | X86_CR3_PCD))
-#define CR3_PCID_ENABLED_RESERVED_BITS 0xFFFFFF0000000000ULL
-#define CR3_L_MODE_RESERVED_BITS (CR3_NONPAE_RESERVED_BITS |   \
-                                 0xFFFFFF0000000000ULL)
+#define CR3_L_MODE_RESERVED_BITS 0xFFFFFF0000000000ULL
 #define CR4_RESERVED_BITS                                               \
        (~(unsigned long)(X86_CR4_VME | X86_CR4_PVI | X86_CR4_TSD | X86_CR4_DE\
                          | X86_CR4_PSE | X86_CR4_PAE | X86_CR4_MCE     \
 
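For reference, the one constant that survives, 0xFFFFFF0000000000ULL, covers bits 63:40, i.e. everything above a 40-bit physical address; the low bits (page offset, PWT/PCD, PCID) are deliberately not part of it. A minimal stand-alone sketch of what the mask accepts and rejects (illustration only, not part of the patch; cr3_reserved_bits_set() is a made-up helper):

#include <stdint.h>
#include <stdio.h>

#define CR3_L_MODE_RESERVED_BITS 0xFFFFFF0000000000ULL	/* bits 63:40 */

/* Made-up helper: does this CR3 value hit the long-mode mask? */
static int cr3_reserved_bits_set(uint64_t cr3)
{
	return (cr3 & CR3_L_MODE_RESERVED_BITS) != 0;
}

int main(void)
{
	/* Page-aligned CR3 with a PCID in bits 11:0: no reserved bits hit. */
	printf("%d\n", cr3_reserved_bits_set(0x12345000ULL | 0x001));
	/* Base above the assumed 40-bit physical limit: reserved bits hit. */
	printf("%d\n", cr3_reserved_bits_set(1ULL << 40));
	return 0;
}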
                ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
                if (efer & EFER_LMA)
                        rsvd = CR3_L_MODE_RESERVED_BITS;
-               else if (ctxt->ops->get_cr(ctxt, 4) & X86_CR4_PAE)
-                       rsvd = CR3_PAE_RESERVED_BITS;
-               else if (ctxt->ops->get_cr(ctxt, 0) & X86_CR0_PG)
-                       rsvd = CR3_NONPAE_RESERVED_BITS;
 
                if (new_val & rsvd)
                        return emulate_gp(ctxt, 0);
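With the PAE and non-PAE branches gone, rsvd stays zero outside long mode, so the emulated MOV-to-CR3 only injects #GP(0) for the high bits in long mode; a legacy-mode guest writing stray low CR3 bits is left alone, matching what kvm_set_cr3() already allowed for non-PAE guests (see the VMware comment removed below). A rough user-space model of the check that remains (cr3_write_faults() is hypothetical; the rsvd = 0 start mirrors the surrounding emulator code):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define CR3_L_MODE_RESERVED_BITS 0xFFFFFF0000000000ULL

/* Hypothetical model: would this CR3 write raise #GP(0) in the emulator? */
static bool cr3_write_faults(bool long_mode, uint64_t new_val)
{
	uint64_t rsvd = 0;

	if (long_mode)
		rsvd = CR3_L_MODE_RESERVED_BITS;

	return (new_val & rsvd) != 0;
}

int main(void)
{
	printf("%d\n", cr3_write_faults(true, 1ULL << 41));	/* 1: faults */
	printf("%d\n", cr3_write_faults(false, 0x123));		/* 0: low bits not checked */
	return 0;
}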
 
                return 0;
        }
 
-       if (is_long_mode(vcpu)) {
-               if (kvm_read_cr4_bits(vcpu, X86_CR4_PCIDE)) {
-                       if (cr3 & CR3_PCID_ENABLED_RESERVED_BITS)
-                               return 1;
-               } else
-                       if (cr3 & CR3_L_MODE_RESERVED_BITS)
-                               return 1;
-       } else {
-               if (is_pae(vcpu)) {
-                       if (cr3 & CR3_PAE_RESERVED_BITS)
-                               return 1;
-                       if (is_paging(vcpu) &&
-                           !load_pdptrs(vcpu, vcpu->arch.walk_mmu, cr3))
-                               return 1;
-               }
-               /*
-                * We don't check reserved bits in nonpae mode, because
-                * this isn't enforced, and VMware depends on this.
-                */
-       }
+       if (is_long_mode(vcpu)) {
+               if (cr3 & CR3_L_MODE_RESERVED_BITS)
+                       return 1;
+       } else if (is_pae(vcpu) && is_paging(vcpu) &&
+                  !load_pdptrs(vcpu, vcpu->arch.walk_mmu, cr3))
+               return 1;
 
        vcpu->arch.cr3 = cr3;
        __set_bit(VCPU_EXREG_CR3, (ulong *)&vcpu->arch.regs_avail);
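
Taken together, kvm_set_cr3() is left with two checks: long mode rejects CR3 values with bits 63:40 set, and legacy PAE paging rejects CR3 values whose PDPTEs fail to load; the else keeps load_pdptrs() off the long-mode path, which does not use the cached PDPTRs. A small model of that flow (illustration only; load_pdptrs_ok() is a stub standing in for load_pdptrs()):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define CR3_L_MODE_RESERVED_BITS 0xFFFFFF0000000000ULL

/* Stub standing in for load_pdptrs(); pretend the guest PDPTEs are valid. */
static bool load_pdptrs_ok(uint64_t cr3)
{
	(void)cr3;
	return true;
}

/* Returns 1 to fail the write (inject #GP), 0 to accept it. */
static int set_cr3_model(bool long_mode, bool pae, bool paging, uint64_t cr3)
{
	if (long_mode) {
		if (cr3 & CR3_L_MODE_RESERVED_BITS)
			return 1;
	} else if (pae && paging && !load_pdptrs_ok(cr3)) {
		return 1;
	}
	return 0;
}

int main(void)
{
	printf("%d\n", set_cr3_model(true, true, true, 1ULL << 40));	/* 1: reserved bits set */
	printf("%d\n", set_cr3_model(false, false, true, 0x123));	/* 0: non-PAE low bits allowed */
	return 0;
}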