@@ -569,11 +569,11 @@ static int __vgic_v3_highest_priority_lr(struct kvm_vcpu *vcpu, u32 vmcr,
569569 continue ;
570570
571571 /* Group-0 interrupt, but Group-0 disabled? */
572- if (!(val & ICH_LR_GROUP ) && !(vmcr & ICH_VMCR_ENG0_MASK ))
572+ if (!(val & ICH_LR_GROUP ) && !(vmcr & ICH_VMCR_EL2_VENG0_MASK ))
573573 continue ;
574574
575575 /* Group-1 interrupt, but Group-1 disabled? */
576- if ((val & ICH_LR_GROUP ) && !(vmcr & ICH_VMCR_ENG1_MASK ))
576+ if ((val & ICH_LR_GROUP ) && !(vmcr & ICH_VMCR_EL2_VENG1_MASK ))
577577 continue ;
578578
579579 /* Not the highest priority? */
@@ -646,19 +646,19 @@ static int __vgic_v3_get_highest_active_priority(void)
646646
647647static unsigned int __vgic_v3_get_bpr0 (u32 vmcr )
648648{
649- return ( vmcr & ICH_VMCR_BPR0_MASK ) >> ICH_VMCR_BPR0_SHIFT ;
649+ return FIELD_GET ( ICH_VMCR_EL2_VBPR0 , vmcr ) ;
650650}
651651
652652static unsigned int __vgic_v3_get_bpr1 (u32 vmcr )
653653{
654654 unsigned int bpr ;
655655
656- if (vmcr & ICH_VMCR_CBPR_MASK ) {
656+ if (vmcr & ICH_VMCR_EL2_VCBPR_MASK ) {
657657 bpr = __vgic_v3_get_bpr0 (vmcr );
658658 if (bpr < 7 )
659659 bpr ++ ;
660660 } else {
661- bpr = ( vmcr & ICH_VMCR_BPR1_MASK ) >> ICH_VMCR_BPR1_SHIFT ;
661+ bpr = FIELD_GET ( ICH_VMCR_EL2_VBPR1 , vmcr ) ;
662662 }
663663
664664 return bpr ;
@@ -758,7 +758,7 @@ static void __vgic_v3_read_iar(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
758758 if (grp != !!(lr_val & ICH_LR_GROUP ))
759759 goto spurious ;
760760
761- pmr = ( vmcr & ICH_VMCR_PMR_MASK ) >> ICH_VMCR_PMR_SHIFT ;
761+ pmr = FIELD_GET ( ICH_VMCR_EL2_VPMR , vmcr ) ;
762762 lr_prio = (lr_val & ICH_LR_PRIORITY_MASK ) >> ICH_LR_PRIORITY_SHIFT ;
763763 if (pmr <= lr_prio )
764764 goto spurious ;
@@ -806,7 +806,7 @@ static int ___vgic_v3_write_dir(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
806806 int lr ;
807807
808808 /* EOImode == 0, nothing to be done here */
809- if (!(vmcr & ICH_VMCR_EOIM_MASK ))
809+ if (!(vmcr & ICH_VMCR_EL2_VEOIM_MASK ))
810810 return 1 ;
811811
812812 /* No deactivate to be performed on an LPI */
@@ -849,7 +849,7 @@ static void __vgic_v3_write_eoir(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
849849 }
850850
851851 /* EOImode == 1 and not an LPI, nothing to be done here */
852- if ((vmcr & ICH_VMCR_EOIM_MASK ) && !(vid >= VGIC_MIN_LPI ))
852+ if ((vmcr & ICH_VMCR_EL2_VEOIM_MASK ) && !(vid >= VGIC_MIN_LPI ))
853853 return ;
854854
855855 lr_prio = (lr_val & ICH_LR_PRIORITY_MASK ) >> ICH_LR_PRIORITY_SHIFT ;
@@ -865,22 +865,19 @@ static void __vgic_v3_write_eoir(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
865865
866866static void __vgic_v3_read_igrpen0 (struct kvm_vcpu * vcpu , u32 vmcr , int rt )
867867{
868- vcpu_set_reg (vcpu , rt , !!( vmcr & ICH_VMCR_ENG0_MASK ));
868+ vcpu_set_reg (vcpu , rt , FIELD_GET ( ICH_VMCR_EL2_VENG0 , vmcr ));
869869}
870870
871871static void __vgic_v3_read_igrpen1 (struct kvm_vcpu * vcpu , u32 vmcr , int rt )
872872{
873- vcpu_set_reg (vcpu , rt , !!( vmcr & ICH_VMCR_ENG1_MASK ));
873+ vcpu_set_reg (vcpu , rt , FIELD_GET ( ICH_VMCR_EL2_VENG1 , vmcr ));
874874}
875875
876876static void __vgic_v3_write_igrpen0 (struct kvm_vcpu * vcpu , u32 vmcr , int rt )
877877{
878878 u64 val = vcpu_get_reg (vcpu , rt );
879879
880- if (val & 1 )
881- vmcr |= ICH_VMCR_ENG0_MASK ;
882- else
883- vmcr &= ~ICH_VMCR_ENG0_MASK ;
880+ FIELD_MODIFY (ICH_VMCR_EL2_VENG0 , & vmcr , val & 1 );
884881
885882 __vgic_v3_write_vmcr (vmcr );
886883}
@@ -889,10 +886,7 @@ static void __vgic_v3_write_igrpen1(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
889886{
890887 u64 val = vcpu_get_reg (vcpu , rt );
891888
892- if (val & 1 )
893- vmcr |= ICH_VMCR_ENG1_MASK ;
894- else
895- vmcr &= ~ICH_VMCR_ENG1_MASK ;
889+ FIELD_MODIFY (ICH_VMCR_EL2_VENG1 , & vmcr , val & 1 );
896890
897891 __vgic_v3_write_vmcr (vmcr );
898892}
@@ -916,10 +910,7 @@ static void __vgic_v3_write_bpr0(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
916910 if (val < bpr_min )
917911 val = bpr_min ;
918912
919- val <<= ICH_VMCR_BPR0_SHIFT ;
920- val &= ICH_VMCR_BPR0_MASK ;
921- vmcr &= ~ICH_VMCR_BPR0_MASK ;
922- vmcr |= val ;
913+ FIELD_MODIFY (ICH_VMCR_EL2_VBPR0 , & vmcr , val );
923914
924915 __vgic_v3_write_vmcr (vmcr );
925916}
@@ -929,17 +920,14 @@ static void __vgic_v3_write_bpr1(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
929920 u64 val = vcpu_get_reg (vcpu , rt );
930921 u8 bpr_min = __vgic_v3_bpr_min ();
931922
932- if (vmcr & ICH_VMCR_CBPR_MASK )
 923+ if (FIELD_GET ( ICH_VMCR_EL2_VCBPR , vmcr ) )
933924 return ;
934925
935926 /* Enforce BPR limiting */
936927 if (val < bpr_min )
937928 val = bpr_min ;
938929
939- val <<= ICH_VMCR_BPR1_SHIFT ;
940- val &= ICH_VMCR_BPR1_MASK ;
941- vmcr &= ~ICH_VMCR_BPR1_MASK ;
942- vmcr |= val ;
930+ FIELD_MODIFY (ICH_VMCR_EL2_VBPR1 , & vmcr , val );
943931
944932 __vgic_v3_write_vmcr (vmcr );
945933}
@@ -1029,19 +1017,14 @@ static void __vgic_v3_read_hppir(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
10291017
10301018static void __vgic_v3_read_pmr (struct kvm_vcpu * vcpu , u32 vmcr , int rt )
10311019{
1032- vmcr &= ICH_VMCR_PMR_MASK ;
1033- vmcr >>= ICH_VMCR_PMR_SHIFT ;
1034- vcpu_set_reg (vcpu , rt , vmcr );
1020+ vcpu_set_reg (vcpu , rt , FIELD_GET (ICH_VMCR_EL2_VPMR , vmcr ));
10351021}
10361022
10371023static void __vgic_v3_write_pmr (struct kvm_vcpu * vcpu , u32 vmcr , int rt )
10381024{
10391025 u32 val = vcpu_get_reg (vcpu , rt );
10401026
1041- val <<= ICH_VMCR_PMR_SHIFT ;
1042- val &= ICH_VMCR_PMR_MASK ;
1043- vmcr &= ~ICH_VMCR_PMR_MASK ;
1044- vmcr |= val ;
1027+ FIELD_MODIFY (ICH_VMCR_EL2_VPMR , & vmcr , val );
10451028
10461029 write_gicreg (vmcr , ICH_VMCR_EL2 );
10471030}
@@ -1064,9 +1047,11 @@ static void __vgic_v3_read_ctlr(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
10641047 /* A3V */
10651048 val |= ((vtr >> 21 ) & 1 ) << ICC_CTLR_EL1_A3V_SHIFT ;
10661049 /* EOImode */
1067- val |= ((vmcr & ICH_VMCR_EOIM_MASK ) >> ICH_VMCR_EOIM_SHIFT ) << ICC_CTLR_EL1_EOImode_SHIFT ;
1050+ val |= FIELD_PREP (ICC_CTLR_EL1_EOImode_MASK ,
1051+ FIELD_GET (ICH_VMCR_EL2_VEOIM , vmcr ));
10681052 /* CBPR */
1069- val |= (vmcr & ICH_VMCR_CBPR_MASK ) >> ICH_VMCR_CBPR_SHIFT ;
1053+ val |= FIELD_PREP (ICC_CTLR_EL1_CBPR_MASK ,
1054+ FIELD_GET (ICH_VMCR_EL2_VCBPR , vmcr ));
10701055
10711056 vcpu_set_reg (vcpu , rt , val );
10721057}
@@ -1075,15 +1060,11 @@ static void __vgic_v3_write_ctlr(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
10751060{
10761061 u32 val = vcpu_get_reg (vcpu , rt );
10771062
1078- if (val & ICC_CTLR_EL1_CBPR_MASK )
1079- vmcr |= ICH_VMCR_CBPR_MASK ;
1080- else
1081- vmcr &= ~ICH_VMCR_CBPR_MASK ;
1063+ FIELD_MODIFY (ICH_VMCR_EL2_VCBPR , & vmcr ,
1064+ FIELD_GET (ICC_CTLR_EL1_CBPR_MASK , val ));
10821065
1083- if (val & ICC_CTLR_EL1_EOImode_MASK )
1084- vmcr |= ICH_VMCR_EOIM_MASK ;
1085- else
1086- vmcr &= ~ICH_VMCR_EOIM_MASK ;
1066+ FIELD_MODIFY (ICH_VMCR_EL2_VEOIM , & vmcr ,
1067+ FIELD_GET (ICC_CTLR_EL1_EOImode_MASK , val ));
10871068
10881069 write_gicreg (vmcr , ICH_VMCR_EL2 );
10891070}
0 commit comments