if (!pmc)
                return 1;
 
-       if (!(kvm_read_cr4(vcpu) & X86_CR4_PCE) &&
+       if (!(kvm_read_cr4_bits(vcpu, X86_CR4_PCE)) &&
            (static_call(kvm_x86_get_cpl)(vcpu) != 0) &&
-           (kvm_read_cr0(vcpu) & X86_CR0_PE))
+           (kvm_read_cr0_bits(vcpu, X86_CR0_PE)))
                return 1;
 
        *data = pmc_read_counter(pmc) & mask;
 
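For reference, the kvm_read_cr*_bits() helpers read through KVM's register cache and only invoke the vendor cache_reg() callback (a VMREAD on VMX) when one of the requested bits may be guest-owned and the cached value is stale; testing bits that are never guest-owned therefore skips the decache entirely, which is what makes these conversions worthwhile. A minimal sketch of the CR0 variant, modeled on arch/x86/kvm/kvm_cache_regs.h (exact names and the static_call plumbing vary by kernel version):

static __always_inline ulong kvm_read_cr0_bits(struct kvm_vcpu *vcpu,
					       ulong mask)
{
	ulong tmask = mask & KVM_POSSIBLE_CR0_GUEST_BITS;

	/*
	 * Refresh the cached CR0 only if a requested bit can be owned by
	 * the guest and the register hasn't been cached since the last
	 * VM-Exit; host-owned bits are always current in vcpu->arch.cr0.
	 */
	if ((tmask & vcpu->arch.cr0_guest_owned_bits) &&
	    !kvm_register_is_available(vcpu, VCPU_EXREG_CR0))
		static_call(kvm_x86_cache_reg)(vcpu, VCPU_EXREG_CR0);

	return vcpu->arch.cr0 & mask;
}

kvm_read_cr4_bits() follows the same pattern with KVM_POSSIBLE_CR4_GUEST_BITS and VCPU_EXREG_CR4. The conversions are behavior-preserving: kvm_read_cr0(vcpu) & X yields the same value as kvm_read_cr0_bits(vcpu, X), but the latter can avoid decaching the full register.
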
                break;
        case 3: /* lmsw */
                val = (exit_qualification >> LMSW_SOURCE_DATA_SHIFT) & 0x0f;
-               trace_kvm_cr_write(0, (kvm_read_cr0(vcpu) & ~0xful) | val);
+               trace_kvm_cr_write(0, (kvm_read_cr0_bits(vcpu, ~0xful) | val));
                kvm_lmsw(vcpu, val);
 
                return kvm_skip_emulated_instruction(vcpu);
        if (!kvm_arch_has_noncoherent_dma(vcpu->kvm))
                return (MTRR_TYPE_WRBACK << VMX_EPT_MT_EPTE_SHIFT) | VMX_EPT_IPAT_BIT;
 
-       if (kvm_read_cr0(vcpu) & X86_CR0_CD) {
+       if (kvm_read_cr0_bits(vcpu, X86_CR0_CD)) {
                if (kvm_check_has_quirk(vcpu->kvm, KVM_X86_QUIRK_CD_NW_CLEARED))
                        cache = MTRR_TYPE_WRBACK;
                else