@@ -586,6 +586,88 @@ static inline u8 * instr_va(struct alt_instr *i)
586586 return (u8 * )& i -> instr_offset + i -> instr_offset ;
587587}
588588
/*
 * Per-site patching state: filled in by analyze_patch_site(), the staged
 * replacement bytes are built by prep_patch_site() and written out by
 * patch_site().
 */
struct patch_site {
	u8 *instr;		/* virtual address of the original instruction(s) */
	struct alt_instr *alt;	/* entry to apply, or NULL to keep the original */
	u8 buff[MAX_PATCH_LEN];	/* staged bytes that will replace the site */
	u8 len;			/* total site length, including alignment padding */
};
595+
596+ static void __init_or_module analyze_patch_site (struct patch_site * ps ,
597+ struct alt_instr * start ,
598+ struct alt_instr * end )
599+ {
600+ struct alt_instr * r ;
601+
602+ ps -> instr = instr_va (start );
603+ ps -> len = start -> instrlen ;
604+
605+ /*
606+ * In case of nested ALTERNATIVE()s the outer alternative might add
607+ * more padding. To ensure consistent patching find the max padding for
608+ * all alt_instr entries for this site (nested alternatives result in
609+ * consecutive entries).
610+ */
611+ for (r = start + 1 ; r < end && instr_va (r ) == ps -> instr ; r ++ ) {
612+ ps -> len = max (ps -> len , r -> instrlen );
613+ start -> instrlen = r -> instrlen = ps -> len ;
614+ }
615+
616+ BUG_ON (ps -> len > sizeof (ps -> buff ));
617+ BUG_ON (start -> cpuid >= (NCAPINTS + NBUGINTS ) * 32 );
618+
619+ /*
620+ * Patch if either:
621+ * - feature is present
622+ * - feature not present but ALT_FLAG_NOT is set to mean,
623+ * patch if feature is *NOT* present.
624+ */
625+ if (!boot_cpu_has (start -> cpuid ) == !(start -> flags & ALT_FLAG_NOT ))
626+ ps -> alt = NULL ;
627+ else
628+ ps -> alt = start ;
629+ }
630+
631+ static void __init_or_module prep_patch_site (struct patch_site * ps )
632+ {
633+ struct alt_instr * alt = ps -> alt ;
634+ u8 buff_sz ;
635+ u8 * repl ;
636+
637+ if (!alt ) {
638+ /* Nothing to patch, use original instruction. */
639+ memcpy (ps -> buff , ps -> instr , ps -> len );
640+ return ;
641+ }
642+
643+ repl = (u8 * )& alt -> repl_offset + alt -> repl_offset ;
644+ DPRINTK (ALT , "feat: %d*32+%d, old: (%pS (%px) len: %d), repl: (%px, len: %d) flags: 0x%x" ,
645+ alt -> cpuid >> 5 , alt -> cpuid & 0x1f ,
646+ ps -> instr , ps -> instr , ps -> len ,
647+ repl , alt -> replacementlen , alt -> flags );
648+
649+ memcpy (ps -> buff , repl , alt -> replacementlen );
650+ buff_sz = alt -> replacementlen ;
651+
652+ if (alt -> flags & ALT_FLAG_DIRECT_CALL )
653+ buff_sz = alt_replace_call (ps -> instr , ps -> buff , alt );
654+
655+ for (; buff_sz < ps -> len ; buff_sz ++ )
656+ ps -> buff [buff_sz ] = 0x90 ;
657+
658+ __apply_relocation (ps -> buff , ps -> instr , ps -> len , repl , alt -> replacementlen );
659+
660+ DUMP_BYTES (ALT , ps -> instr , ps -> len , "%px: old_insn: " , ps -> instr );
661+ DUMP_BYTES (ALT , repl , alt -> replacementlen , "%px: rpl_insn: " , repl );
662+ DUMP_BYTES (ALT , ps -> buff , ps -> len , "%px: final_insn: " , ps -> instr );
663+ }
664+
/*
 * Write the staged bytes over the live patch site (boot/module-load time,
 * before SMP, hence text_poke_early()).
 *
 * NOTE(review): optimize_nops() now runs unconditionally, i.e. also when a
 * replacement was selected — the removed pre-refactor code only ran it on
 * the keep-original path.  Presumably intentional (optimize NOP padding in
 * the replacement too), but confirm this behavior change is wanted.
 */
static void __init_or_module patch_site(struct patch_site *ps)
{
	optimize_nops(ps->instr, ps->buff, ps->len);
	text_poke_early(ps->instr, ps->buff, ps->len);
}
670+
589671/*
590672 * Replace instructions with better alternatives for this CPU type. This runs
591673 * before SMP is initialized to avoid SMP problems with self modifying code.
@@ -599,9 +681,7 @@ static inline u8 * instr_va(struct alt_instr *i)
599681void __init_or_module noinline apply_alternatives (struct alt_instr * start ,
600682 struct alt_instr * end )
601683{
602- u8 insn_buff [MAX_PATCH_LEN ];
603- u8 * instr , * replacement ;
604- struct alt_instr * a , * b ;
684+ struct alt_instr * a ;
605685
606686 DPRINTK (ALT , "alt table %px, -> %px" , start , end );
607687
@@ -625,59 +705,11 @@ void __init_or_module noinline apply_alternatives(struct alt_instr *start,
625705 * order.
626706 */
627707 for (a = start ; a < end ; a ++ ) {
628- unsigned int insn_buff_sz = 0 ;
629-
630- /*
631- * In case of nested ALTERNATIVE()s the outer alternative might
632- * add more padding. To ensure consistent patching find the max
633- * padding for all alt_instr entries for this site (nested
634- * alternatives result in consecutive entries).
635- */
636- for (b = a + 1 ; b < end && instr_va (b ) == instr_va (a ); b ++ ) {
637- u8 len = max (a -> instrlen , b -> instrlen );
638- a -> instrlen = b -> instrlen = len ;
639- }
640-
641- instr = instr_va (a );
642- replacement = (u8 * )& a -> repl_offset + a -> repl_offset ;
643- BUG_ON (a -> instrlen > sizeof (insn_buff ));
644- BUG_ON (a -> cpuid >= (NCAPINTS + NBUGINTS ) * 32 );
645-
646- /*
647- * Patch if either:
648- * - feature is present
649- * - feature not present but ALT_FLAG_NOT is set to mean,
650- * patch if feature is *NOT* present.
651- */
652- if (!boot_cpu_has (a -> cpuid ) == !(a -> flags & ALT_FLAG_NOT )) {
653- memcpy (insn_buff , instr , a -> instrlen );
654- optimize_nops (instr , insn_buff , a -> instrlen );
655- text_poke_early (instr , insn_buff , a -> instrlen );
656- continue ;
657- }
658-
659- DPRINTK (ALT , "feat: %d*32+%d, old: (%pS (%px) len: %d), repl: (%px, len: %d) flags: 0x%x" ,
660- a -> cpuid >> 5 ,
661- a -> cpuid & 0x1f ,
662- instr , instr , a -> instrlen ,
663- replacement , a -> replacementlen , a -> flags );
664-
665- memcpy (insn_buff , replacement , a -> replacementlen );
666- insn_buff_sz = a -> replacementlen ;
667-
668- if (a -> flags & ALT_FLAG_DIRECT_CALL )
669- insn_buff_sz = alt_replace_call (instr , insn_buff , a );
670-
671- for (; insn_buff_sz < a -> instrlen ; insn_buff_sz ++ )
672- insn_buff [insn_buff_sz ] = 0x90 ;
673-
674- text_poke_apply_relocation (insn_buff , instr , a -> instrlen , replacement , a -> replacementlen );
675-
676- DUMP_BYTES (ALT , instr , a -> instrlen , "%px: old_insn: " , instr );
677- DUMP_BYTES (ALT , replacement , a -> replacementlen , "%px: rpl_insn: " , replacement );
678- DUMP_BYTES (ALT , insn_buff , insn_buff_sz , "%px: final_insn: " , instr );
708+ struct patch_site ps ;
679709
680- text_poke_early (instr , insn_buff , insn_buff_sz );
710+ analyze_patch_site (& ps , a , end );
711+ prep_patch_site (& ps );
712+ patch_site (& ps );
681713 }
682714
683715 kasan_enable_current ();
0 commit comments