Module Name:    src
Committed By:   knakahara
Date:           Wed Sep  7 00:40:19 UTC 2022

Modified Files:
        src/sys/arch/amd64/amd64: amd64_trap.S genassym.cf lock_stubs.S
            locore.S spl.S vector.S
        src/sys/arch/i386/i386: genassym.cf i386_trap.S lock_stubs.S locore.S
            spl.S vector.S
        src/sys/arch/x86/include: cpu.h intr.h intrdefs.h
        src/sys/arch/x86/x86: intr.c lapic.c x86_softintr.c
        src/sys/arch/xen/include: hypervisor.h intr.h
        src/sys/arch/xen/x86: hypervisor_machdep.c
        src/sys/arch/xen/xen: evtchn.c xenevt.c

Log Message:
NetBSD/x86: Raise the number of interrupt sources per CPU from 32 to 56.

There has been no objection for three years.
    https://mail-index.netbsd.org/port-amd64/2019/09/22/msg003012.html
Implemented by nonaka@n.o, updated by me.
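
In effect, the commit folds the old 32-bit ci_ilevel and ci_ipending fields
into a single 64-bit ci_istate word: the low 56 bits hold the per-CPU
pending-interrupt bitmap and the top byte holds the current IPL (hence
CPU_INFO_ILEVEL becoming "ci_istate + 7" in genassym.cf, and the
movb/movzbl/cmpxchgq changes in the assembly below).  The C sketch below
only illustrates that layout under those assumptions; the helper names are
invented and it is not code from the tree.

/* Minimal sketch of the assumed packed istate layout (not NetBSD code). */
#include <stdint.h>
#include <stdatomic.h>

#define ISTATE_IPL_SHIFT	56	/* IPL lives in the top byte */
#define ISTATE_PENDING_MASK	((UINT64_C(1) << ISTATE_IPL_SHIFT) - 1)	/* low 56 bits */

/* Current interrupt priority level: the top byte of the word. */
static inline unsigned int
istate_ipl(uint64_t istate)
{
	return (unsigned int)(istate >> ISTATE_IPL_SHIFT);
}

/* Mark interrupt source 'src' (0..55) pending; softint_trigger does the
 * analogous 64-bit or into the low 56 bits of ci_istate. */
static inline void
istate_set_pending(_Atomic uint64_t *istate, unsigned int src)
{
	atomic_fetch_or(istate, (UINT64_C(1) << src) & ISTATE_PENDING_MASK);
}

/* Lower the IPL while preserving the pending bits, mirroring the new
 * spllower() compare-and-swap loop (cmpxchgq on amd64). */
static inline void
istate_lower_ipl(_Atomic uint64_t *istate, unsigned int nlevel)
{
	uint64_t old = atomic_load(istate);
	uint64_t new;

	do {
		new = (old & ISTATE_PENDING_MASK) |
		    ((uint64_t)nlevel << ISTATE_IPL_SHIFT);
	} while (!atomic_compare_exchange_weak(istate, &old, new));
}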


To generate a diff of this commit:
cvs rdiff -u -r1.53 -r1.54 src/sys/arch/amd64/amd64/amd64_trap.S
cvs rdiff -u -r1.88 -r1.89 src/sys/arch/amd64/amd64/genassym.cf
cvs rdiff -u -r1.36 -r1.37 src/sys/arch/amd64/amd64/lock_stubs.S
cvs rdiff -u -r1.213 -r1.214 src/sys/arch/amd64/amd64/locore.S
cvs rdiff -u -r1.47 -r1.48 src/sys/arch/amd64/amd64/spl.S
cvs rdiff -u -r1.78 -r1.79 src/sys/arch/amd64/amd64/vector.S
cvs rdiff -u -r1.125 -r1.126 src/sys/arch/i386/i386/genassym.cf
cvs rdiff -u -r1.22 -r1.23 src/sys/arch/i386/i386/i386_trap.S
cvs rdiff -u -r1.36 -r1.37 src/sys/arch/i386/i386/lock_stubs.S
cvs rdiff -u -r1.189 -r1.190 src/sys/arch/i386/i386/locore.S
cvs rdiff -u -r1.55 -r1.56 src/sys/arch/i386/i386/spl.S
cvs rdiff -u -r1.88 -r1.89 src/sys/arch/i386/i386/vector.S
cvs rdiff -u -r1.132 -r1.133 src/sys/arch/x86/include/cpu.h
cvs rdiff -u -r1.65 -r1.66 src/sys/arch/x86/include/intr.h
cvs rdiff -u -r1.25 -r1.26 src/sys/arch/x86/include/intrdefs.h
cvs rdiff -u -r1.160 -r1.161 src/sys/arch/x86/x86/intr.c
cvs rdiff -u -r1.88 -r1.89 src/sys/arch/x86/x86/lapic.c
cvs rdiff -u -r1.3 -r1.4 src/sys/arch/x86/x86/x86_softintr.c
cvs rdiff -u -r1.54 -r1.55 src/sys/arch/xen/include/hypervisor.h
cvs rdiff -u -r1.59 -r1.60 src/sys/arch/xen/include/intr.h
cvs rdiff -u -r1.44 -r1.45 src/sys/arch/xen/x86/hypervisor_machdep.c
cvs rdiff -u -r1.99 -r1.100 src/sys/arch/xen/xen/evtchn.c
cvs rdiff -u -r1.66 -r1.67 src/sys/arch/xen/xen/xenevt.c

Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.

Modified files:

Index: src/sys/arch/amd64/amd64/amd64_trap.S
diff -u src/sys/arch/amd64/amd64/amd64_trap.S:1.53 src/sys/arch/amd64/amd64/amd64_trap.S:1.54
--- src/sys/arch/amd64/amd64/amd64_trap.S:1.53	Mon Jun 29 23:04:56 2020
+++ src/sys/arch/amd64/amd64/amd64_trap.S	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: amd64_trap.S,v 1.53 2020/06/29 23:04:56 riastradh Exp $	*/
+/*	$NetBSD: amd64_trap.S,v 1.54 2022/09/07 00:40:18 knakahara Exp $	*/
 
 /*
  * Copyright (c) 1998, 2007, 2008, 2017 The NetBSD Foundation, Inc.
@@ -347,7 +347,7 @@ IDTVEC(trap07)
 	ZTRAP_NJ(T_DNA)
 	INTRENTRY
 #ifdef DIAGNOSTIC
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 #endif
 	movq	%rsp,%rdi
 	call	_C_LABEL(fpudna)
@@ -439,7 +439,7 @@ IDTVEC(trap15)
 	ZTRAP_NJ(T_ASTFLT)
 	INTRENTRY
 #ifdef DIAGNOSTIC
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 #endif
 	jmp	.Lalltraps_checkusr
 IDTVEC_END(trap15)
@@ -457,7 +457,7 @@ IDTVEC(trap16)
 	HANDLE_DEFERRED_FPU
 #endif /* XENPV */
 #ifdef DIAGNOSTIC
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 #endif
 	movq	%rsp,%rdi
 	call	_C_LABEL(fputrap)
@@ -508,7 +508,7 @@ IDTVEC(intrspurious)
 	ZTRAP_NJ(T_ASTFLT)
 	INTRENTRY
 #ifdef DIAGNOSTIC
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 #endif
 	jmp	.Lalltraps_checkusr
 IDTVEC_END(intrspurious)
@@ -658,7 +658,7 @@ ENTRY(alltraps)
 
 calltrap:
 #ifdef DIAGNOSTIC
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 #endif
 	movq	%rsp,%rdi
 	incq	CPUVAR(NTRAP)
@@ -688,7 +688,7 @@ calltrap:
 
 6:
 #ifdef DIAGNOSTIC
-	cmpl	CPUVAR(ILEVEL),%ebx
+	cmpb	CPUVAR(ILEVEL),%bl
 	jne	.Lspl_error
 #endif
 	INTRFASTEXIT
@@ -701,7 +701,7 @@ calltrap:
 .Lspl_error:
 	STI(si)
 	movabsq	$4f,%rdi
-	movl	CPUVAR(ILEVEL),%esi
+	movzbl	CPUVAR(ILEVEL),%esi
 	call	_C_LABEL(panic)
 4:	.asciz	"spl not lowered on trap exit, ilevel=%x"
 #endif

Index: src/sys/arch/amd64/amd64/genassym.cf
diff -u src/sys/arch/amd64/amd64/genassym.cf:1.88 src/sys/arch/amd64/amd64/genassym.cf:1.89
--- src/sys/arch/amd64/amd64/genassym.cf:1.88	Sat Aug 20 23:48:50 2022
+++ src/sys/arch/amd64/amd64/genassym.cf	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-#	$NetBSD: genassym.cf,v 1.88 2022/08/20 23:48:50 riastradh Exp $
+#	$NetBSD: genassym.cf,v 1.89 2022/09/07 00:40:18 knakahara Exp $
 
 #
 # Copyright (c) 1998, 2006, 2007, 2008 The NetBSD Foundation, Inc.
@@ -247,9 +247,9 @@ define	CPU_INFO_NINTR		offsetof(struct c
 define	CPU_INFO_CURPRIORITY	offsetof(struct cpu_info, ci_schedstate.spc_curpriority)
 
 define	CPU_INFO_GDT		offsetof(struct cpu_info, ci_gdt)
-define	CPU_INFO_ILEVEL		offsetof(struct cpu_info, ci_ilevel)
+define	CPU_INFO_ILEVEL		(offsetof(struct cpu_info, ci_istate) + 7)
 define	CPU_INFO_IDEPTH		offsetof(struct cpu_info, ci_idepth)
-define	CPU_INFO_IPENDING	offsetof(struct cpu_info, ci_ipending)
+define	CPU_INFO_IPENDING	offsetof(struct cpu_info, ci_istate)
 define	CPU_INFO_IMASKED	offsetof(struct cpu_info, ci_imasked)
 define	CPU_INFO_IMASK		offsetof(struct cpu_info, ci_imask)
 define	CPU_INFO_IUNMASK	offsetof(struct cpu_info, ci_iunmask)

Index: src/sys/arch/amd64/amd64/lock_stubs.S
diff -u src/sys/arch/amd64/amd64/lock_stubs.S:1.36 src/sys/arch/amd64/amd64/lock_stubs.S:1.37
--- src/sys/arch/amd64/amd64/lock_stubs.S:1.36	Sat Apr 25 15:26:16 2020
+++ src/sys/arch/amd64/amd64/lock_stubs.S	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: lock_stubs.S,v 1.36 2020/04/25 15:26:16 bouyer Exp $	*/
+/*	$NetBSD: lock_stubs.S,v 1.37 2022/09/07 00:40:18 knakahara Exp $	*/
 
 /*
  * Copyright (c) 2006, 2007, 2008, 2009 The NetBSD Foundation, Inc.
@@ -91,11 +91,11 @@ END(mutex_exit)
  */
 ENTRY(mutex_spin_enter)
 	movl	$1, %eax
-	movl	CPUVAR(ILEVEL), %esi
+	movzbl	CPUVAR(ILEVEL), %esi
 	movzbl	MTX_IPL(%rdi), %ecx		/* new SPL */
 	cmpl	%ecx, %esi			/* higher? */
 	cmovgl	%esi, %ecx
-	movl	%ecx, CPUVAR(ILEVEL)		/* splraiseipl() */
+	movb	%cl, CPUVAR(ILEVEL)		/* splraiseipl() */
 	subl	%eax, CPUVAR(MTX_COUNT)		/* decl doesnt set CF */
 	cmovncl	CPUVAR(MTX_OLDSPL), %esi
 	movl	%esi, CPUVAR(MTX_OLDSPL)

Index: src/sys/arch/amd64/amd64/locore.S
diff -u src/sys/arch/amd64/amd64/locore.S:1.213 src/sys/arch/amd64/amd64/locore.S:1.214
--- src/sys/arch/amd64/amd64/locore.S:1.213	Sun Jun 12 11:36:42 2022
+++ src/sys/arch/amd64/amd64/locore.S	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: locore.S,v 1.213 2022/06/12 11:36:42 bouyer Exp $	*/
+/*	$NetBSD: locore.S,v 1.214 2022/09/07 00:40:18 knakahara Exp $	*/
 
 /*
  * Copyright-o-rama!
@@ -1266,7 +1266,7 @@ ENTRY(cpu_switchto)
 #ifndef XENPV
 	/* Raise the IPL to IPL_HIGH. Dropping the priority is deferred until
 	 * mi_switch(), when cpu_switchto() returns. XXX Still needed? */
-	movl	$IPL_HIGH,CPUVAR(ILEVEL)
+	movb	$IPL_HIGH,CPUVAR(ILEVEL)
 
 	/* The 32bit LWPs are handled differently. */
 	testl	$PCB_COMPAT32,PCB_FLAGS(%r14)
@@ -1376,7 +1376,7 @@ ENTRY(handle_syscall)
 	jnz	9f
 
 #ifdef DIAGNOSTIC
-	cmpl	$IPL_NONE,CPUVAR(ILEVEL)
+	cmpb	$IPL_NONE,CPUVAR(ILEVEL)
 	jne	.Lspl_error
 #endif
 
@@ -1398,7 +1398,7 @@ ENTRY(handle_syscall)
 #ifdef DIAGNOSTIC
 .Lspl_error:
 	movabsq	$4f,%rdi
-	movl	CPUVAR(ILEVEL),%esi
+	movzbl	CPUVAR(ILEVEL),%esi
 	call	_C_LABEL(panic)
 4:	.asciz	"spl not lowered on syscall, ilevel=%x"
 #endif

Index: src/sys/arch/amd64/amd64/spl.S
diff -u src/sys/arch/amd64/amd64/spl.S:1.47 src/sys/arch/amd64/amd64/spl.S:1.48
--- src/sys/arch/amd64/amd64/spl.S:1.47	Sat Aug 29 07:16:03 2020
+++ src/sys/arch/amd64/amd64/spl.S	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: spl.S,v 1.47 2020/08/29 07:16:03 maxv Exp $	*/
+/*	$NetBSD: spl.S,v 1.48 2022/09/07 00:40:18 knakahara Exp $	*/
 
 /*
  * Copyright (c) 2003 Wasabi Systems, Inc.
@@ -83,10 +83,10 @@
  * int splraise(int s);
  */
 ENTRY(splraise)
-	movl	CPUVAR(ILEVEL),%eax
+	movzbl	CPUVAR(ILEVEL),%eax
 	cmpl	%edi,%eax
 	cmoval	%eax,%edi
-	movl	%edi,CPUVAR(ILEVEL)
+	movb	%dil,CPUVAR(ILEVEL)
 	KMSAN_INIT_RET(4)
 	ret
 END(splraise)
@@ -111,7 +111,7 @@ IDTVEC(softintr)
 	pushq	%r14
 	pushq	%r15
 
-	movl	$IPL_HIGH,CPUVAR(ILEVEL)
+	movb	$IPL_HIGH,CPUVAR(ILEVEL)
 	movq	CPUVAR(CURLWP),%r15
 	movq	IS_LWP(%rax),%rdi	/* switch to handler LWP */
 	movq	L_PCB(%rdi),%rdx
@@ -188,7 +188,9 @@ END(softintr_ret)
  * Software interrupt registration.
  */
 ENTRY(softint_trigger)
-	orl	%edi,CPUVAR(IPENDING)	/* atomic on local cpu */
+	shlq	$8,%rdi			/* clear upper 8 bits */
+	shrq	$8,%rdi
+	orq	%rdi,CPUVAR(IPENDING)	/* atomic on local cpu */
 	ret
 END(softint_trigger)
 
@@ -198,7 +200,7 @@ END(softint_trigger)
  * Handles preemption interrupts via Xspllower().
  */
 IDTVEC(recurse_preempt)
-	movl	$IPL_PREEMPT,CPUVAR(ILEVEL)
+	movb	$IPL_PREEMPT,CPUVAR(ILEVEL)
 	STI(di)
 	xorq	%rdi,%rdi
 	KMSAN_INIT_ARG(8)
@@ -213,7 +215,7 @@ IDTVEC_END(recurse_preempt)
  * Handles preemption interrupts via Xdoreti().
  */
 IDTVEC(resume_preempt)
-	movl	$IPL_PREEMPT,CPUVAR(ILEVEL)
+	movb	$IPL_PREEMPT,CPUVAR(ILEVEL)
 	STI(ax)
 	testq	$SEL_RPL,TF_CS(%rsp)
 	jnz	1f
@@ -230,34 +232,32 @@ IDTVEC_END(resume_preempt)
 
 /*
  * void spllower(int s);
- *
- * For cmpxchg8b, edx/ecx are the high words and eax/ebx the low.
- *
- * edx : eax = old level / old ipending
- * ecx : ebx = new level / old ipending
  */
 ENTRY(spllower)
-	movl	CPUVAR(ILEVEL),%edx
-	movq	%rbx,%r8
+	movzbl	CPUVAR(ILEVEL),%edx
 	cmpl	%edx,%edi			/* new level is lower? */
 	jae	1f
+	xorq	%rcx,%rcx			/* rcx: ci_ipending mask */
+	notq	%rcx
+	shrq	$8,%rcx
+	movq	%rdi,%r9			/* r9: shifted new level */
+	shlq	$56,%r9
 0:
-	movl	CPUVAR(IPENDING),%eax
-	movl	%edi,%ecx
-	testl	%eax,CPUVAR(IUNMASK)(,%rcx,4)/* deferred interrupts? */
-	movl	%eax,%ebx
+	movq	CPUVAR(IPENDING),%rax
+	testq	%rax,CPUVAR(IUNMASK)(,%rdi,8)	/* deferred interrupts? */
 	/*
 	 * On the P4 this jump is cheaper than patching in junk
 	 * using cmov.  Is cmpxchg expensive if it fails?
 	 */
 	jnz	2f
-	cmpxchg8b CPUVAR(ISTATE)		/* swap in new ilevel */
+	movq	%rax,%r8
+	andq	%rcx,%r8
+	orq	%r9,%r8
+	cmpxchgq %r8,CPUVAR(ISTATE)		/* swap in new ilevel */
 	jnz	0b
 1:
-	movq	%r8,%rbx
 	ret
 2:
-	movq	%r8,%rbx
 	jmp	_C_LABEL(Xspllower)
 END(spllower)
 
@@ -286,16 +286,16 @@ IDTVEC(spllower)
 	leaq	1f(%rip),%r13		/* address to resume loop at */
 1:
 	movl	%ebx,%eax		/* get cpl */
-	movl	CPUVAR(IUNMASK)(,%rax,4),%eax
+	movq	CPUVAR(IUNMASK)(,%rax,8),%rax
 	CLI(si)
-	andl	CPUVAR(IPENDING),%eax	/* any non-masked bits left? */
+	andq	CPUVAR(IPENDING),%rax	/* any non-masked bits left? */
 	jz	2f
-	bsrl	%eax,%eax
-	btrl	%eax,CPUVAR(IPENDING)
+	bsrq	%rax,%rax
+	btrq	%rax,CPUVAR(IPENDING)
 	movq	CPUVAR(ISOURCES)(,%rax,8),%rax
 	jmp	*IS_RECURSE(%rax)
 2:
-	movl	%ebx,CPUVAR(ILEVEL)
+	movb	%bl,CPUVAR(ILEVEL)
 	STI(si)
 	popq	%r12
 	popq	%r13
@@ -318,16 +318,16 @@ IDTVEC(doreti)
 	leaq	1f(%rip),%r13
 1:
 	movl    %ebx,%eax
-	movl	CPUVAR(IUNMASK)(,%rax,4),%eax
+	movq	CPUVAR(IUNMASK)(,%rax,8),%rax
 	CLI(si)
-	andl	CPUVAR(IPENDING),%eax
+	andq	CPUVAR(IPENDING),%rax
 	jz	2f
-	bsrl	%eax,%eax		/* slow, but not worth optimizing */
-	btrl	%eax,CPUVAR(IPENDING)
+	bsrq	%rax,%rax		/* slow, but not worth optimizing */
+	btrq	%rax,CPUVAR(IPENDING)
 	movq	CPUVAR(ISOURCES)(,%rax,8),%rax
 	jmp	*IS_RESUME(%rax)
 2:	/* Check for ASTs on exit to user mode. */
-	movl	%ebx,CPUVAR(ILEVEL)
+	movb	%bl,CPUVAR(ILEVEL)
 5:
 	testb	$SEL_RPL,TF_CS(%rsp)
 	jz	6f

Index: src/sys/arch/amd64/amd64/vector.S
diff -u src/sys/arch/amd64/amd64/vector.S:1.78 src/sys/arch/amd64/amd64/vector.S:1.79
--- src/sys/arch/amd64/amd64/vector.S:1.78	Tue May 24 15:55:19 2022
+++ src/sys/arch/amd64/amd64/vector.S	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: vector.S,v 1.78 2022/05/24 15:55:19 bouyer Exp $	*/
+/*	$NetBSD: vector.S,v 1.79 2022/09/07 00:40:18 knakahara Exp $	*/
 
 /*
  * Copyright (c) 1998, 2007, 2008 The NetBSD Foundation, Inc.
@@ -123,7 +123,7 @@ IDTVEC(handle_x2apic_ipi)
 	xorl	%eax,%eax
 	xorl	%edx,%edx
 	wrmsr
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 	cmpl	$IPL_HIGH,%ebx
 	jae	2f
 	jmp	1f
@@ -131,7 +131,7 @@ IDTVEC_END(handle_x2apic_ipi)
 IDTVEC(handle_lapic_ipi)
 	movq	_C_LABEL(local_apic_va),%rbx
 	movl	$0,LAPIC_EOI(%rbx)
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 	cmpl	$IPL_HIGH,%ebx
 	jae	2f
 	jmp	1f
@@ -139,13 +139,13 @@ IDTVEC_END(handle_lapic_ipi)
 IDTVEC(resume_lapic_ipi)
 1:
 	incl	CPUVAR(IDEPTH)
-	movl	$IPL_HIGH,CPUVAR(ILEVEL)
+	movb	$IPL_HIGH,CPUVAR(ILEVEL)
 	sti
 	pushq	%rbx
 	call	_C_LABEL(x86_ipi_handler)
 	jmp	_C_LABEL(Xdoreti)
 2:
-	orl	$(1 << LIR_IPI),CPUVAR(IPENDING)
+	btsq	$LIR_IPI,CPUVAR(IPENDING)
 	INTRFASTEXIT
 IDTVEC_END(resume_lapic_ipi)
 
@@ -223,7 +223,7 @@ IDTVEC(handle_x2apic_ltimer)
 	xorl	%eax,%eax
 	xorl	%edx,%edx
 	wrmsr
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 	cmpl	$IPL_CLOCK,%ebx
 	jae	2f
 	jmp	1f
@@ -231,7 +231,7 @@ IDTVEC_END(handle_x2apic_ltimer)
 IDTVEC(handle_lapic_ltimer)
 	movq	_C_LABEL(local_apic_va),%rbx
 	movl	$0,LAPIC_EOI(%rbx)
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 	cmpl	$IPL_CLOCK,%ebx
 	jae	2f
 	jmp	1f
@@ -239,7 +239,7 @@ IDTVEC_END(handle_lapic_ltimer)
 IDTVEC(resume_lapic_ltimer)
 1:
 	incl	CPUVAR(IDEPTH)
-	movl	$IPL_CLOCK,CPUVAR(ILEVEL)
+	movb	$IPL_CLOCK,CPUVAR(ILEVEL)
 	sti
 	pushq	%rbx
 	movq	%rsp,%rsi
@@ -247,7 +247,7 @@ IDTVEC(resume_lapic_ltimer)
 	call	_C_LABEL(lapic_clockintr)
 	jmp	_C_LABEL(Xdoreti)
 2:
-	orl	$(1 << LIR_TIMER),CPUVAR(IPENDING)
+	btsq	$LIR_TIMER,CPUVAR(IPENDING)
 	INTRFASTEXIT
 IDTVEC_END(resume_lapic_ltimer)
 
@@ -279,7 +279,7 @@ IDTVEC(recurse_hyperv_hypercall)
 	jmp	1f
 IDTVEC_END(recurse_hyperv_hypercall)
 IDTVEC(handle_hyperv_hypercall)
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 	cmpl	$IPL_NET,%ebx
 	jae	2f
 	jmp	1f
@@ -287,14 +287,14 @@ IDTVEC_END(handle_hyperv_hypercall)
 IDTVEC(resume_hyperv_hypercall)
 1:
 	incl	CPUVAR(IDEPTH)
-	movl	$IPL_NET,CPUVAR(ILEVEL)
+	movb	$IPL_NET,CPUVAR(ILEVEL)
 	sti
 	pushq	%rbx
 	movq	%rsp,%rsi
 	call	_C_LABEL(hyperv_hypercall_intr)
 	jmp	_C_LABEL(Xdoreti)
 2:
-	orl	$(1 << LIR_HV),CPUVAR(IPENDING)
+	btsq	$LIR_HV,CPUVAR(IPENDING)
 	INTRFASTEXIT
 IDTVEC_END(resume_hyperv_hypercall)
 
@@ -380,14 +380,14 @@ IDTVEC(handle_ ## name ## num)						;\
 	testq	%r14,%r14						;\
 	jz	9f			/* stray */			;\
 	movl	IS_MAXLEVEL(%r14),%ebx					;\
-	movl	CPUVAR(ILEVEL),%r13d					;\
+	movzbl	CPUVAR(ILEVEL),%r13d					;\
 	cmpl	%ebx,%r13d						;\
 	jae	10f			/* currently masked; hold it */	;\
 	incq	CPUVAR(NINTR)		/* statistical info */		;\
 	incq	IS_EVCNT(%r14)						;\
 1:									\
 	pushq	%r13			/* save for Xdoreti */		;\
-	movl	%ebx,CPUVAR(ILEVEL)					;\
+	movb	%bl,CPUVAR(ILEVEL)					;\
 	sti								;\
 	incl	CPUVAR(IDEPTH)						;\
 	movq	IS_HANDLERS(%r14),%rbx					;\
@@ -399,7 +399,7 @@ IDTVEC(handle_ ## name ## num)						;\
 	jle	7f							;\
 	movq	%rsp,%rsi						;\
 	movq	IH_ARG(%rbx),%rdi					;\
-	movl	%r12d,CPUVAR(ILEVEL)					;\
+	movb	%r12b,CPUVAR(ILEVEL)					;\
 	call	*IH_FUN(%rbx)		/* call it */			;\
 	movq	IH_NEXT(%rbx),%rbx	/* next handler in chain */	;\
 	testq	%rbx,%rbx						;\
@@ -414,19 +414,19 @@ IDTVEC(handle_ ## name ## num)						;\
 	jmp	_C_LABEL(Xdoreti)	/* lower spl and do ASTs */	;\
 7:									\
 	cli								;\
-	orl	$(1 << num),CPUVAR(IPENDING)				;\
+	btsq	$num,CPUVAR(IPENDING)					;\
 8:	level_mask(num)							;\
 	late_ack(num)							;\
 	sti								;\
 	jmp	_C_LABEL(Xdoreti)	/* lower spl and do ASTs */	;\
 12:									\
 	cli								;\
-	orl	$(1 << num),CPUVAR(IMASKED)				;\
-	btrl	$(num),CPUVAR(IPENDING)					;\
+	btsq	$num,CPUVAR(IMASKED)					;\
+	btrq	$(num),CPUVAR(IPENDING)					;\
 	jmp	8b							;\
 10:									\
 	cli								;\
-	orl	$(1 << num),CPUVAR(IPENDING)				;\
+	btsq	$num,CPUVAR(IPENDING)					;\
 	level_mask(num)							;\
 	late_ack(num)							;\
 	INTRFASTEXIT							;\
@@ -484,137 +484,69 @@ INTRSTUB(legacy,15,i8259_asm_ack2,voidop
 
 #if NIOAPIC > 0
 
-INTRSTUB(ioapic_edge,0,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,1,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,2,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,3,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,4,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,5,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,6,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,7,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,8,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,9,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,10,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,11,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,12,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,13,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,14,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,15,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,16,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,17,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,18,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,19,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,20,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,21,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,22,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,23,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,24,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,25,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,26,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,27,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,28,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,29,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,30,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,31,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-
-INTRSTUB(ioapic_level,0,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,1,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,2,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,3,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,4,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,5,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,6,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,7,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,8,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,9,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,10,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,11,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,12,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,13,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,14,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,15,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,16,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,17,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,18,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,19,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,20,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,21,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,22,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,23,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,24,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,25,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,26,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,27,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,28,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,29,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,30,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,31,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-
-INTRSTUB(x2apic_edge,0,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,1,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,2,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,3,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,4,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,5,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,6,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,7,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,8,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,9,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,10,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,11,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,12,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,13,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,14,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,15,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,16,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,17,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,18,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,19,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,20,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,21,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,22,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,23,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,24,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,25,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,26,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,27,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,28,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,29,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,30,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,31,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-
-INTRSTUB(x2apic_level,0,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,1,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,2,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,3,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,4,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,5,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,6,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,7,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,8,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,9,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,10,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,11,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,12,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,13,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,14,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,15,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,16,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,17,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,18,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,19,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,20,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,21,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,22,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,23,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,24,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,25,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,26,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,27,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,28,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,29,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,30,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,31,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
+#define INTRSTUB_56(name,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,0,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,1,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,2,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,3,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,4,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,5,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,6,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,7,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,8,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,9,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,10,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,11,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,12,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,13,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,14,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,15,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,16,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,17,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,18,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,19,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,20,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,21,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,22,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,23,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,24,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,25,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,26,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,27,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,28,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,29,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,30,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,31,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,32,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,33,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,34,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,35,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,36,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,37,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,38,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,39,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,40,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,41,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,42,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,43,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,44,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,45,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,46,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,47,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,48,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,49,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,50,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,51,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,52,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,53,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,54,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,55,early_ack,late_ack,mask,unmask,level_mask)
+
+INTRSTUB_56(ioapic_edge,voidop,ioapic_asm_ack,voidop,voidop,voidop)
+INTRSTUB_56(ioapic_level,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
+
+INTRSTUB_56(x2apic_edge,voidop,x2apic_asm_ack,voidop,voidop,voidop)
+INTRSTUB_56(x2apic_level,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
 
 #endif
 
@@ -651,9 +583,9 @@ LABEL(name ## _stubs)				; \
 END(name ## _stubs)
 
 /*
- * Create an array of structs intrstub (32 entries).
+ * Create an array of structs intrstub (56 entries).
  */
-#define INTRSTUB_ARRAY_32(name) 		; \
+#define INTRSTUB_ARRAY_56(name) 		; \
 	.type _C_LABEL(name ## _stubs), @object	; \
 	.align 8				; \
 LABEL(name ## _stubs)				; \
@@ -689,6 +621,30 @@ LABEL(name ## _stubs)				; \
 	INTRSTUB_ENTRY(name ## 29)		; \
 	INTRSTUB_ENTRY(name ## 30)		; \
 	INTRSTUB_ENTRY(name ## 31)		; \
+	INTRSTUB_ENTRY(name ## 32)		; \
+	INTRSTUB_ENTRY(name ## 33)		; \
+	INTRSTUB_ENTRY(name ## 34)		; \
+	INTRSTUB_ENTRY(name ## 35)		; \
+	INTRSTUB_ENTRY(name ## 36)		; \
+	INTRSTUB_ENTRY(name ## 37)		; \
+	INTRSTUB_ENTRY(name ## 38)		; \
+	INTRSTUB_ENTRY(name ## 39)		; \
+	INTRSTUB_ENTRY(name ## 40)		; \
+	INTRSTUB_ENTRY(name ## 41)		; \
+	INTRSTUB_ENTRY(name ## 42)		; \
+	INTRSTUB_ENTRY(name ## 43)		; \
+	INTRSTUB_ENTRY(name ## 44)		; \
+	INTRSTUB_ENTRY(name ## 45)		; \
+	INTRSTUB_ENTRY(name ## 46)		; \
+	INTRSTUB_ENTRY(name ## 47)		; \
+	INTRSTUB_ENTRY(name ## 48)		; \
+	INTRSTUB_ENTRY(name ## 49)		; \
+	INTRSTUB_ENTRY(name ## 50)		; \
+	INTRSTUB_ENTRY(name ## 51)		; \
+	INTRSTUB_ENTRY(name ## 52)		; \
+	INTRSTUB_ENTRY(name ## 53)		; \
+	INTRSTUB_ENTRY(name ## 54)		; \
+	INTRSTUB_ENTRY(name ## 55)		; \
 END(name ## _stubs)
 
 #endif /* !XENPV */
@@ -707,7 +663,7 @@ IDTVEC(resume_ ## name ## sir)						\
 	movq	CPUVAR(ISOURCES) + (sir) * 8,%r14			;\
 1:									\
 	pushq	%r13							;\
-	movl	$level,CPUVAR(ILEVEL)					;\
+	movb	$level,CPUVAR(ILEVEL)					;\
 	STI(si)								;\
 	incl	CPUVAR(IDEPTH)						;\
 	movq	IS_HANDLERS(%r14),%rbx					;\
@@ -772,7 +728,7 @@ ENTRY(hypervisor_callback)
 	pushq	$T_ASTFLT
 	INTRENTRY
 IDTVEC(handle_hypervisor_callback)
-	movl    CPUVAR(ILEVEL),%edi
+	movzbl	CPUVAR(ILEVEL),%edi
 	pushq   %rdi /* for Xdoreti */
 	incl	CPUVAR(IDEPTH)
 	movq	%rsp,%rdi
@@ -822,10 +778,10 @@ END(failsafe_callback)
 INTRSTUB_ARRAY_16(legacy)
 
 #if NIOAPIC > 0
-INTRSTUB_ARRAY_32(ioapic_edge)
-INTRSTUB_ARRAY_32(ioapic_level)
+INTRSTUB_ARRAY_56(ioapic_edge)
+INTRSTUB_ARRAY_56(ioapic_level)
 
-INTRSTUB_ARRAY_32(x2apic_edge)
-INTRSTUB_ARRAY_32(x2apic_level)
+INTRSTUB_ARRAY_56(x2apic_edge)
+INTRSTUB_ARRAY_56(x2apic_level)
 #endif
 #endif /* !XENPV */

Index: src/sys/arch/i386/i386/genassym.cf
diff -u src/sys/arch/i386/i386/genassym.cf:1.125 src/sys/arch/i386/i386/genassym.cf:1.126
--- src/sys/arch/i386/i386/genassym.cf:1.125	Sat Aug 20 23:48:50 2022
+++ src/sys/arch/i386/i386/genassym.cf	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-#	$NetBSD: genassym.cf,v 1.125 2022/08/20 23:48:50 riastradh Exp $
+#	$NetBSD: genassym.cf,v 1.126 2022/09/07 00:40:18 knakahara Exp $
 
 #
 # Copyright (c) 1998, 2006, 2007, 2008 The NetBSD Foundation, Inc.
@@ -267,12 +267,12 @@ define	CPU_INFO_VENDOR		offsetof(struct 
 define	CPU_INFO_SIGNATURE	offsetof(struct cpu_info, ci_signature)
 
 define	CPU_INFO_GDT		offsetof(struct cpu_info, ci_gdt)
-define	CPU_INFO_IPENDING	offsetof(struct cpu_info, ci_ipending)
+define	CPU_INFO_IPENDING	offsetof(struct cpu_info, ci_istate)
 define	CPU_INFO_IMASKED	offsetof(struct cpu_info, ci_imasked)
 define	CPU_INFO_IMASK		offsetof(struct cpu_info, ci_imask)
 define	CPU_INFO_ISOURCES	offsetof(struct cpu_info, ci_isources)
 define	CPU_INFO_IUNMASK	offsetof(struct cpu_info, ci_iunmask)
-define	CPU_INFO_ILEVEL		offsetof(struct cpu_info, ci_ilevel)
+define	CPU_INFO_ILEVEL		(offsetof(struct cpu_info, ci_istate) + 7)
 define	CPU_INFO_IDEPTH		offsetof(struct cpu_info, ci_idepth)
 define	CPU_INFO_MTX_COUNT	offsetof(struct cpu_info, ci_mtx_count)
 define	CPU_INFO_MTX_OLDSPL	offsetof(struct cpu_info, ci_mtx_oldspl)

Index: src/sys/arch/i386/i386/i386_trap.S
diff -u src/sys/arch/i386/i386/i386_trap.S:1.22 src/sys/arch/i386/i386/i386_trap.S:1.23
--- src/sys/arch/i386/i386/i386_trap.S:1.22	Sat May  2 18:49:57 2020
+++ src/sys/arch/i386/i386/i386_trap.S	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: i386_trap.S,v 1.22 2020/05/02 18:49:57 bouyer Exp $	*/
+/*	$NetBSD: i386_trap.S,v 1.23 2022/09/07 00:40:18 knakahara Exp $	*/
 
 /*
  * Copyright 2002 (c) Wasabi Systems, Inc.
@@ -66,7 +66,7 @@
 
 #if 0
 #include <machine/asm.h>
-__KERNEL_RCSID(0, "$NetBSD: i386_trap.S,v 1.22 2020/05/02 18:49:57 bouyer Exp $");
+__KERNEL_RCSID(0, "$NetBSD: i386_trap.S,v 1.23 2022/09/07 00:40:18 knakahara Exp $");
 #endif
 
 /*
@@ -215,7 +215,7 @@ IDTVEC(trap07)
 	pushl	$T_DNA
 	INTRENTRY
 #ifdef DIAGNOSTIC
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 #endif
 	pushl	%esp
 	call	_C_LABEL(fpudna)
@@ -280,7 +280,7 @@ IDTVEC(trap0f)
 	INTRENTRY
 	STI(%eax)
 #ifdef DIAGNOSTIC
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 #endif
 	jmp	.Lalltraps_checkusr
 IDTVEC_END(trap0f)
@@ -296,7 +296,7 @@ IDTVEC(trap10)
 	pushl	$T_ARITHTRAP
 .Ldo_fputrap:
 	INTRENTRY
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 	pushl	%esp
 	addl	$1,CPUVAR(NTRAP)	/* statistical info */
 	adcl	$0,CPUVAR(NTRAP)+4
@@ -404,7 +404,7 @@ ENTRY(alltraps)
 
 calltrap:
 #ifdef DIAGNOSTIC
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 #endif
 	addl	$1,CPUVAR(NTRAP)	/* statistical info */
 	adcl	$0,CPUVAR(NTRAP)+4
@@ -444,7 +444,7 @@ calltrap:
 	jz	22f
 	/* process pending interrupts */
 	CLI(%eax)
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 	movl	$.Lalltraps_resume,%esi /* address to resume loop at */
 .Lalltraps_resume:
 	movl	%ebx,%eax		/* get cpl */
@@ -455,14 +455,14 @@ calltrap:
 	btrl	%eax,CPUVAR(IPENDING)
 	movl	CPUVAR(ISOURCES)(,%eax,4),%eax
 	jmp	*IS_RESUME(%eax)
-11:	movl	%ebx,CPUVAR(ILEVEL)	/* restore cpl */
+11:	movb	%bl,CPUVAR(ILEVEL)	/* restore cpl */
 	jmp	.Lalltraps_checkusr
 22:
 #endif /* XEN */
 
 6:
 #ifdef DIAGNOSTIC
-	cmpl	CPUVAR(ILEVEL),%ebx
+	cmpb	CPUVAR(ILEVEL),%bl
 	jne	.Lspl_error
 #endif
 	INTRFASTEXIT

Index: src/sys/arch/i386/i386/lock_stubs.S
diff -u src/sys/arch/i386/i386/lock_stubs.S:1.36 src/sys/arch/i386/i386/lock_stubs.S:1.37
--- src/sys/arch/i386/i386/lock_stubs.S:1.36	Wed Apr  6 22:47:57 2022
+++ src/sys/arch/i386/i386/lock_stubs.S	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: lock_stubs.S,v 1.36 2022/04/06 22:47:57 riastradh Exp $	*/
+/*	$NetBSD: lock_stubs.S,v 1.37 2022/09/07 00:40:18 knakahara Exp $	*/
 
 /*-
  * Copyright (c) 2006, 2007, 2008, 2009 The NetBSD Foundation, Inc.
@@ -35,7 +35,7 @@
  */
 
 #include <machine/asm.h>
-__KERNEL_RCSID(0, "$NetBSD: lock_stubs.S,v 1.36 2022/04/06 22:47:57 riastradh Exp $");
+__KERNEL_RCSID(0, "$NetBSD: lock_stubs.S,v 1.37 2022/09/07 00:40:18 knakahara Exp $");
 
 #include "opt_lockdebug.h"
 
@@ -264,12 +264,15 @@ ENTRY(mutex_spin_exit)
 	incl	CPUVAR(MTX_COUNT)
 	movb	$0, MTX_LOCK(%edx)		/* zero */
 	jnz	1f
-	movl	%fs:CPU_INFO_IUNMASK(,%ecx,4), %edx
+	movl	CPUVAR(IUNMASK)(,%ecx,8), %edx
+	movl	CPUVAR(IUNMASK)+4(,%ecx,8), %eax
 	cli
 	testl	CPUVAR(IPENDING), %edx
 	movl    %ecx, 4(%esp)
 	jnz	_C_LABEL(Xspllower)		/* does sti */
-	movl	%ecx, CPUVAR(ILEVEL)
+	testl	CPUVAR(IPENDING)+4, %eax
+	jnz	_C_LABEL(Xspllower)		/* does sti */
+	movb	%cl, CPUVAR(ILEVEL)
 	sti
 1:	ret
 	.space	32, 0xCC
@@ -287,20 +290,36 @@ ENTRY(i686_mutex_spin_exit)
 	mov	4(%esp),%edx
 	movl	CPUVAR(MTX_OLDSPL), %ecx
 	incl	CPUVAR(MTX_COUNT)
-	movb	%ch, MTX_LOCK(%edx)		/* zero */
+	movb	$0, MTX_LOCK(%edx)		/* zero */
 	jnz	1f
 	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	%ecx, %esi
+	movl	%ecx, %edi
+	shll	$24, %edi
 0:
 	movl	CPUVAR(IPENDING), %eax
-	testl	%eax, %fs:CPU_INFO_IUNMASK(,%ecx,4)
+	testl	%eax, CPUVAR(IUNMASK)(,%esi,8)
+	jnz	2f
+	movl	CPUVAR(IPENDING)+4, %edx
+	testl	%edx, CPUVAR(IUNMASK)+4(,%esi,8)
 	jnz	2f
 	movl	%eax, %ebx
+	movl	%edx, %ecx
+	andl	$0x00ffffff, %ecx
+	orl	%edi, %ecx
 	cmpxchg8b CPUVAR(ISTATE)		/* swap in new ilevel */
 	jnz	0b
+	popl	%edi
+	popl	%esi
 	popl	%ebx
 1:
 	ret
 2:
+	movl	%esi,%ecx
+	popl	%edi
+	popl	%esi
 	popl	%ebx
 	movl	%ecx,4(%esp)
 

Index: src/sys/arch/i386/i386/locore.S
diff -u src/sys/arch/i386/i386/locore.S:1.189 src/sys/arch/i386/i386/locore.S:1.190
--- src/sys/arch/i386/i386/locore.S:1.189	Sun Jun 12 11:36:42 2022
+++ src/sys/arch/i386/i386/locore.S	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: locore.S,v 1.189 2022/06/12 11:36:42 bouyer Exp $	*/
+/*	$NetBSD: locore.S,v 1.190 2022/09/07 00:40:18 knakahara Exp $	*/
 
 /*
  * Copyright-o-rama!
@@ -128,7 +128,7 @@
  */
 
 #include <machine/asm.h>
-__KERNEL_RCSID(0, "$NetBSD: locore.S,v 1.189 2022/06/12 11:36:42 bouyer Exp $");
+__KERNEL_RCSID(0, "$NetBSD: locore.S,v 1.190 2022/09/07 00:40:18 knakahara Exp $");
 
 #include "opt_copy_symtab.h"
 #include "opt_ddb.h"
@@ -1543,7 +1543,7 @@ IDTVEC(syscall)
 	STI(%eax)
 
 #ifdef DIAGNOSTIC
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 	testl	%ebx,%ebx
 	jz	1f
 	pushl	$5f
@@ -1580,7 +1580,7 @@ IDTVEC(syscall)
 	jz	14f
 	/* process pending interrupts */
 	CLI(%eax)
-	movl	CPUVAR(ILEVEL), %ebx
+	movzbl	CPUVAR(ILEVEL), %ebx
 	movl	$.Lsyscall_resume, %esi /* address to resume loop at */
 .Lsyscall_resume:
 	movl	%ebx,%eax		/* get cpl */
@@ -1591,13 +1591,13 @@ IDTVEC(syscall)
 	btrl	%eax,CPUVAR(IPENDING)
 	movl	CPUVAR(ISOURCES)(,%eax,4),%eax
 	jmp	*IS_RESUME(%eax)
-17:	movl	%ebx, CPUVAR(ILEVEL)	/* restore cpl  */
+17:	movb	%bl, CPUVAR(ILEVEL)	/* restore cpl  */
 	jmp	.Lsyscall_checkast
 14:
 #endif /* XENPV */
 
 #ifdef DIAGNOSTIC
-	cmpl	$IPL_NONE,CPUVAR(ILEVEL)
+	cmpb	$IPL_NONE,CPUVAR(ILEVEL)
 	jne	3f
 #endif
 

Index: src/sys/arch/i386/i386/spl.S
diff -u src/sys/arch/i386/i386/spl.S:1.55 src/sys/arch/i386/i386/spl.S:1.56
--- src/sys/arch/i386/i386/spl.S:1.55	Sun May 17 12:12:22 2020
+++ src/sys/arch/i386/i386/spl.S	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: spl.S,v 1.55 2020/05/17 12:12:22 ad Exp $	*/
+/*	$NetBSD: spl.S,v 1.56 2022/09/07 00:40:18 knakahara Exp $	*/
 
 /*
  * Copyright (c) 1998, 2007, 2008, 2020 The NetBSD Foundation, Inc.
@@ -30,7 +30,7 @@
  */
 
 #include <machine/asm.h>
-__KERNEL_RCSID(0, "$NetBSD: spl.S,v 1.55 2020/05/17 12:12:22 ad Exp $");
+__KERNEL_RCSID(0, "$NetBSD: spl.S,v 1.56 2022/09/07 00:40:18 knakahara Exp $");
 
 #include "opt_ddb.h"
 #include "opt_spldebug.h"
@@ -49,10 +49,10 @@ __KERNEL_RCSID(0, "$NetBSD: spl.S,v 1.55
  */
 ENTRY(splraise)
 	movl	4(%esp),%edx
-	movl	CPUVAR(ILEVEL),%eax
+	movzbl	CPUVAR(ILEVEL),%eax
 	cmpl	%edx,%eax
 	ja	1f
-	movl	%edx,CPUVAR(ILEVEL)
+	movb	%dl,CPUVAR(ILEVEL)
 1:
 #ifdef SPLDEBUG
 	pushl	%ebp
@@ -87,14 +87,17 @@ ENTRY(spllower)
 	popl	%ebp
 #endif /* SPLDEBUG */
 	movl	4(%esp),%ecx
-	cmpl	CPUVAR(ILEVEL),%ecx
+	cmpb	CPUVAR(ILEVEL),%cl
 	jae	1f
-	movl	CPUVAR(IUNMASK)(,%ecx,4),%edx
+	movl	CPUVAR(IUNMASK)(,%ecx,8),%edx
+	movl	CPUVAR(IUNMASK)+4(,%ecx,8),%eax
 	PUSHF(%eax)
 	CLI(%eax)
 	testl	CPUVAR(IPENDING),%edx
 	jnz	2f
-	movl	%ecx,CPUVAR(ILEVEL)
+	testl	CPUVAR(IPENDING)+4,%eax
+	jnz	2f
+	movb	%cl,CPUVAR(ILEVEL)
 	POPF(%eax)
 1:
 	ret
@@ -117,26 +120,41 @@ STRONG_ALIAS(spllower, cx8_spllower)
  *
  * For cmpxchg8b, edx/ecx are the high words and eax/ebx the low.
  *
- * edx : eax = old level / old ipending
- * ecx : ebx = new level / old ipending
+ * edx : eax = old level + high 24 bit old ipending / low 32 bit old ipending
+ * ecx : ebx = new level + high 24 bit old ipending / low 32 bit old ipending
  */
 ENTRY(cx8_spllower)
 	movl	4(%esp),%ecx
-	movl	CPUVAR(ILEVEL),%edx
+	movzbl	CPUVAR(ILEVEL),%edx
 	cmpl	%edx,%ecx			/* new level is lower? */
-	pushl	%ebx
 	jae	1f
+	pushl	%ebx
+	pushl	%esi
+	pushl	%edi
+	movl	%ecx,%esi
+	movl	%ecx,%edi
+	shll	$24,%edi
 0:
 	movl	CPUVAR(IPENDING),%eax
-	testl	%eax,CPUVAR(IUNMASK)(,%ecx,4)	/* deferred interrupts? */
-	movl	%eax,%ebx
+	testl	%eax,CPUVAR(IUNMASK)(,%esi,8)	/* deferred interrupts? */
 	jnz	2f
+	movl	CPUVAR(IPENDING)+4,%edx
+	testl	%edx,CPUVAR(IUNMASK)+4(,%esi,8)
+	jnz	2f
+	movl	%eax,%ebx
+	movl	%edx,%ecx
+	andl	$0x00ffffff,%ecx
+	orl	%edi,%ecx
 	cmpxchg8b CPUVAR(ISTATE)		/* swap in new ilevel */
 	jnz	0b
-1:
+	popl	%edi
+	popl	%esi
 	popl	%ebx
+1:
 	ret
 2:
+	popl	%edi
+	popl	%esi
 	popl	%ebx
 
 	/* The reference must be absolute, hence the indirect jump. */
@@ -203,7 +221,17 @@ IDTVEC(spllower)
 #endif /* XENPV */
 #endif /* defined(DEBUG) */
 	movl	%ebx,%eax			/* get cpl */
-	movl	CPUVAR(IUNMASK)(,%eax,4),%eax
+	movl	CPUVAR(IUNMASK)+4(,%eax,8),%eax
+	andl	CPUVAR(IPENDING)+4,%eax		/* any non-masked bits left? */
+	jz	10f
+	bsrl	%eax,%eax
+	btrl	%eax,CPUVAR(IPENDING)+4
+	addl	$32,%eax
+	movl	CPUVAR(ISOURCES)(,%eax,4),%eax
+	jmp	*IS_RECURSE(%eax)
+10:
+	movl	%ebx,%eax			/* get cpl */
+	movl	CPUVAR(IUNMASK)(,%eax,8),%eax
 	andl	CPUVAR(IPENDING),%eax		/* any non-masked bits left? */
 	jz	2f
 	bsrl	%eax,%eax
@@ -211,7 +239,7 @@ IDTVEC(spllower)
 	movl	CPUVAR(ISOURCES)(,%eax,4),%eax
 	jmp	*IS_RECURSE(%eax)
 2:
-	movl	%ebx,CPUVAR(ILEVEL)
+	movb	%bl,CPUVAR(ILEVEL)
 #ifdef XENPV
 	STIC(%eax)
 	jz 4f
@@ -267,7 +295,17 @@ IDTVEC(doreti)
 #endif /* defined(DEBUG) */
 
 	movl	%ebx,%eax
-	movl	CPUVAR(IUNMASK)(,%eax,4),%eax
+	movl	CPUVAR(IUNMASK)+4(,%eax,8),%eax
+	andl	CPUVAR(IPENDING)+4,%eax
+	jz	10f
+	bsrl	%eax,%eax		/* slow, but not worth optimizing */
+	btrl	%eax,CPUVAR(IPENDING)+4
+	addl	$32,%eax
+	movl	CPUVAR(ISOURCES)(,%eax, 4),%eax
+	jmp	*IS_RESUME(%eax)
+10:
+	movl	%ebx,%eax
+	movl	CPUVAR(IUNMASK)(,%eax,8),%eax
 	andl	CPUVAR(IPENDING),%eax
 	jz	2f
 	bsrl	%eax,%eax		/* slow, but not worth optimizing */
@@ -275,7 +313,7 @@ IDTVEC(doreti)
 	movl	CPUVAR(ISOURCES)(,%eax, 4),%eax
 	jmp	*IS_RESUME(%eax)
 2:	/* Check for ASTs on exit to user mode. */
-	movl	%ebx,CPUVAR(ILEVEL)
+	movb	%bl,CPUVAR(ILEVEL)
 5:
 	testb	$CHK_UPL,TF_CS(%esp)
 	jnz	doreti_checkast
@@ -342,7 +380,7 @@ IDTVEC(softintr)
 	pushl	%ebx
 	pushl	%esi
 	pushl	%edi
-	movl	$IPL_HIGH,CPUVAR(ILEVEL)
+	movb	$IPL_HIGH,CPUVAR(ILEVEL)
 	STI(%esi)
 	movl	CPUVAR(CURLWP),%esi
 	movl	IS_LWP(%eax),%edi	/* switch to handler LWP */
@@ -403,7 +441,7 @@ END(softint_trigger)
  * Handles preemption interrupts via Xspllower().
  */
 IDTVEC(recurse_preempt)
-	movl	$IPL_PREEMPT,CPUVAR(ILEVEL)
+	movb	$IPL_PREEMPT,CPUVAR(ILEVEL)
 	STI(%eax)
 	pushl	$0
 	call	_C_LABEL(kpreempt)
@@ -418,7 +456,7 @@ IDTVEC_END(recurse_preempt)
  * Handles preemption interrupts via Xdoreti().
  */
 IDTVEC(resume_preempt)
-	movl	$IPL_PREEMPT,CPUVAR(ILEVEL)
+	movb	$IPL_PREEMPT,CPUVAR(ILEVEL)
 	STI(%eax)
 	testb	$CHK_UPL,TF_CS(%esp)
 	jnz	1f
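
The i386 side handles the same 64-bit ci_istate with 32-bit registers: the
deferred-interrupt test above now checks both halves of the pending word
(CPUVAR(IPENDING) and CPUVAR(IPENDING)+4) against the widened 8-byte IUNMASK
entry, and cmpxchg8b swaps in the new level with the upper 24 pending bits
preserved in %ecx.  A rough C equivalent of those two steps, again only an
illustration with invented names:

/* Illustration only: 32-bit view of the packed istate word (not NetBSD code). */
#include <stdbool.h>
#include <stdint.h>

struct istate32 {
	uint32_t pending_lo;	/* interrupt sources 0..31 */
	uint32_t hi;		/* sources 32..55 in bits 0..23, IPL in bits 24..31 */
};

/* Deferred-interrupt test: any unmasked pending bit in either half? */
static inline bool
deferred_pending(const struct istate32 *is, uint64_t iunmask)
{
	return (is->pending_lo & (uint32_t)iunmask) != 0 ||
	    (is->hi & (uint32_t)(iunmask >> 32)) != 0;
}

/* High word handed to cmpxchg8b: keep the 24 pending bits and replace
 * the IPL byte (bits 24..31) with the new level. */
static inline uint32_t
istate32_new_hi(uint32_t old_hi, uint8_t nlevel)
{
	return (old_hi & 0x00ffffff) | ((uint32_t)nlevel << 24);
}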

Index: src/sys/arch/i386/i386/vector.S
diff -u src/sys/arch/i386/i386/vector.S:1.88 src/sys/arch/i386/i386/vector.S:1.89
--- src/sys/arch/i386/i386/vector.S:1.88	Tue May 24 15:55:19 2022
+++ src/sys/arch/i386/i386/vector.S	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: vector.S,v 1.88 2022/05/24 15:55:19 bouyer Exp $	*/
+/*	$NetBSD: vector.S,v 1.89 2022/09/07 00:40:18 knakahara Exp $	*/
 
 /*
  * Copyright 2002 (c) Wasabi Systems, Inc.
@@ -65,7 +65,7 @@
  */
 
 #include <machine/asm.h>
-__KERNEL_RCSID(0, "$NetBSD: vector.S,v 1.88 2022/05/24 15:55:19 bouyer Exp $");
+__KERNEL_RCSID(0, "$NetBSD: vector.S,v 1.89 2022/09/07 00:40:18 knakahara Exp $");
 
 #include "opt_ddb.h"
 #include "opt_multiprocessor.h"
@@ -165,7 +165,7 @@ IDTVEC(intr_x2apic_ipi)
 	xorl	%eax,%eax
 	xorl	%edx,%edx
 	wrmsr
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 	cmpl	$IPL_HIGH,%ebx
 	jae	2f
 	jmp	1f
@@ -176,7 +176,7 @@ IDTVEC(intr_lapic_ipi)
 	INTRENTRY
 	movl	_C_LABEL(local_apic_va),%ebx
 	movl	$0,LAPIC_EOI(%ebx)
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 	cmpl	$IPL_HIGH,%ebx
 	jae	2f
 IDTVEC_END(intr_lapic_ipi)
@@ -184,13 +184,13 @@ IDTVEC(resume_lapic_ipi)
 1:
 	pushl	%ebx
 	IDEPTH_INCR
-	movl	$IPL_HIGH,CPUVAR(ILEVEL)
+	movb	$IPL_HIGH,CPUVAR(ILEVEL)
 	sti
 	call	_C_LABEL(x86_ipi_handler)
 	cli
 	jmp	_C_LABEL(Xdoreti)
 2:
-	orl	$(1 << LIR_IPI),CPUVAR(IPENDING)
+	btsl	$(LIR_IPI - 32),CPUVAR(IPENDING)+4
 	INTRFASTEXIT
 IDTVEC_END(resume_lapic_ipi)
 
@@ -290,7 +290,7 @@ IDTVEC(intr_x2apic_ltimer)
 	xorl	%eax,%eax
 	xorl	%edx,%edx
 	wrmsr
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 	cmpl	$IPL_CLOCK,%ebx
 	jae	2f
 	jmp	1f
@@ -301,7 +301,7 @@ IDTVEC(intr_lapic_ltimer)
 	INTRENTRY
 	movl	_C_LABEL(local_apic_va),%ebx
 	movl	$0,LAPIC_EOI(%ebx)
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 	cmpl	$IPL_CLOCK,%ebx
 	jae	2f
 IDTVEC_END(intr_lapic_ltimer)
@@ -309,7 +309,7 @@ IDTVEC(resume_lapic_ltimer)
 1:
 	pushl	%ebx
 	IDEPTH_INCR
-	movl	$IPL_CLOCK,CPUVAR(ILEVEL)
+	movb	$IPL_CLOCK,CPUVAR(ILEVEL)
 	sti
 	pushl	$0
 	call	_C_LABEL(lapic_clockintr)
@@ -317,7 +317,7 @@ IDTVEC(resume_lapic_ltimer)
 	cli
 	jmp	_C_LABEL(Xdoreti)
 2:
-	orl	$(1 << LIR_TIMER),CPUVAR(IPENDING)
+	btsl	$(LIR_TIMER - 32),CPUVAR(IPENDING)+4
 	INTRFASTEXIT
 IDTVEC_END(resume_lapic_ltimer)
 
@@ -337,7 +337,7 @@ IDTVEC(intr_hyperv_hypercall)
 	pushl	$0
 	pushl	$T_ASTFLT
 	INTRENTRY
-	movl	CPUVAR(ILEVEL),%ebx
+	movzbl	CPUVAR(ILEVEL),%ebx
 	cmpl	$IPL_NET,%ebx
 	jae	2f
 	jmp	1f
@@ -346,7 +346,7 @@ IDTVEC(resume_hyperv_hypercall)
 1:
 	pushl	%ebx
 	IDEPTH_INCR
-	movl	$IPL_NET,CPUVAR(ILEVEL)
+	movb	$IPL_NET,CPUVAR(ILEVEL)
 	sti
 	pushl	%esp
 	call	_C_LABEL(hyperv_hypercall_intr)
@@ -354,7 +354,7 @@ IDTVEC(resume_hyperv_hypercall)
 	cli
 	jmp	_C_LABEL(Xdoreti)
 2:
-	orl	$(1 << LIR_HV),CPUVAR(IPENDING)
+	btsl	$(LIR_HV - 32),CPUVAR(IPENDING)+4
 	INTRFASTEXIT
 IDTVEC_END(resume_hyperv_hypercall)
 #endif	/* NHYPERV > 0 */
@@ -370,7 +370,7 @@ IDTVEC_END(resume_hyperv_hypercall)
  * for specific PICs.
  */
 
-#define	INTRSTUB(name, num, early_ack, late_ack, mask, unmask, level_mask) \
+#define	INTRSTUB1(name, num, sub, off, early_ack, late_ack, mask, unmask, level_mask) \
 IDTVEC(recurse_ ## name ## num)						;\
 	INTR_RECURSE_HWFRAME						;\
 	subl	$4,%esp							;\
@@ -394,7 +394,7 @@ IDTVEC(intr_ ## name ## num)						;\
 	testl	%ebp,%ebp						;\
 	jz	9f			/* stray */			;\
 	movl	IS_MAXLEVEL(%ebp),%ebx					;\
-	movl	CPUVAR(ILEVEL),%esi					;\
+	movzbl	CPUVAR(ILEVEL),%esi					;\
 	cmpl	%ebx,%esi						;\
 	jae	10f			/* currently masked; hold it */	;\
 	addl	$1,CPUVAR(NINTR)	/* statistical info */		;\
@@ -403,7 +403,7 @@ IDTVEC(intr_ ## name ## num)						;\
 	adcl	$0,IS_EVCNTHI(%ebp)					;\
 1:									\
 	pushl	%esi			/* if_ppi */			;\
-	movl	%ebx,CPUVAR(ILEVEL)					;\
+	movb	%bl,CPUVAR(ILEVEL)					;\
 	/* switch stack if necessary, and push a ptr to our intrframe */ \
 	IDEPTH_INCR							;\
 	sti								;\
@@ -411,14 +411,14 @@ IDTVEC(intr_ ## name ## num)						;\
 	cmpl	$0,IS_MASK_COUNT(%ebp)	/* source currently masked? */	;\
 	jne	12f			/* yes, hold it */		;\
 6:									\
-	movl	IH_LEVEL(%ebx),%edi					;\
-	cmpl	%esi,%edi						;\
+	movl	IH_LEVEL(%ebx),%eax					;\
+	cmpl	%esi,%eax						;\
 	jle	7f							;\
 	pushl	IH_ARG(%ebx)						;\
-	movl	IH_FUN(%ebx),%eax					;\
-	movl	%edi,CPUVAR(ILEVEL)					;\
+	movl	IH_FUN(%ebx),%edi					;\
+	movb	%al,CPUVAR(ILEVEL)					;\
 	movl	IH_NEXT(%ebx),%ebx	/* next handler in chain */	;\
-	call	*%eax			/* call it */			;\
+	call	*%edi			/* call it */			;\
 	addl	$4,%esp			/* toss the arg */		;\
 	testl	%ebx,%ebx						;\
 	jnz	6b							;\
@@ -430,17 +430,17 @@ IDTVEC(intr_ ## name ## num)						;\
 	jmp	_C_LABEL(Xdoreti)	/* lower spl and do ASTs */	;\
 7:									\
 	cli								;\
-	orl	$(1 << num),CPUVAR(IPENDING)				;\
+	btsl	$(num - sub),CPUVAR(IPENDING) + off			;\
 8:	level_mask(num)							;\
 	late_ack(num)							;\
 	jmp	_C_LABEL(Xdoreti)	/* lower spl and do ASTs */	;\
 12:									\
 	cli								;\
-	orl	$(1 << num),CPUVAR(IMASKED)				;\
-	btrl	$(num),CPUVAR(IPENDING)					;\
+	btsl	$(num - sub),CPUVAR(IMASKED) + off			;\
+	btrl	$(num - sub),CPUVAR(IPENDING) + off			;\
 	jmp	8b							;\
 10:									\
-	orl	$(1 << num),CPUVAR(IPENDING)				;\
+	btsl	$(num - sub),CPUVAR(IPENDING) + off			;\
 	level_mask(num)							;\
 	late_ack(num)							;\
 	INTRFASTEXIT							;\
@@ -452,6 +452,11 @@ IDTVEC(intr_ ## name ## num)						;\
 	INTRFASTEXIT							;\
 IDTVEC_END(intr_ ## name ## num)
 
+#define	INTRSTUB(name, num, early_ack, late_ack, mask, unmask, level_mask) \
+    INTRSTUB1(name, num, 0, 0, early_ack, late_ack, mask, unmask, level_mask)
+#define	INTRSTUB32(name, num, early_ack, late_ack, mask, unmask, level_mask) \
+    INTRSTUB1(name, num, 32, 4, early_ack, late_ack, mask, unmask, level_mask)
+
 #define ICUADDR IO_ICU1
 
 INTRSTUB(legacy,0,i8259_asm_ack1,voidop,i8259_asm_mask,i8259_asm_unmask,
@@ -492,449 +497,167 @@ INTRSTUB(legacy,15,i8259_asm_ack2,voidop
 
 #if NIOAPIC > 0
 
-INTRSTUB(ioapic_edge,0,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,1,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,2,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,3,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,4,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,5,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,6,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,7,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,8,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,9,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,10,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,11,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,12,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,13,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,14,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,15,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,16,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,17,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,18,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,19,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,20,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,21,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,22,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,23,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,24,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,25,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,26,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,27,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,28,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,29,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,30,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(ioapic_edge,31,voidop,ioapic_asm_ack,voidop,voidop,voidop)
-
-INTRSTUB(ioapic_level,0,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,1,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,2,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,3,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,4,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,5,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,6,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,7,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,8,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,9,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,10,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,11,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,12,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,13,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,14,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,15,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,16,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,17,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,18,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,19,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,20,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,21,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,22,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,23,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,24,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,25,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,26,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,27,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,28,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,29,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,30,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(ioapic_level,31,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-
-INTRSTUB(x2apic_edge,0,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,1,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,2,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,3,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,4,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,5,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,6,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,7,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,8,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,9,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,10,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,11,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,12,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,13,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,14,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,15,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,16,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,17,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,18,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,19,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,20,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,21,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,22,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,23,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,24,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,25,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,26,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,27,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,28,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,29,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,30,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-INTRSTUB(x2apic_edge,31,voidop,x2apic_asm_ack,voidop,voidop,voidop)
-
-INTRSTUB(x2apic_level,0,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,1,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,2,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,3,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,4,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,5,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,6,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,7,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,8,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,9,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,10,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,11,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,12,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,13,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,14,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,15,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,16,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,17,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,18,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,19,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,20,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,21,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,22,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,23,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,24,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,25,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,26,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,27,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,28,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,29,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,30,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
-INTRSTUB(x2apic_level,31,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
+#define INTRSTUB_56(name,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,0,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,1,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,2,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,3,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,4,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,5,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,6,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,7,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,8,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,9,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,10,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,11,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,12,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,13,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,14,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,15,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,16,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,17,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,18,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,19,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,20,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,21,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,22,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,23,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,24,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,25,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,26,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,27,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,28,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,29,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,30,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB(name,31,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,32,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,33,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,34,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,35,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,36,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,37,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,38,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,39,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,40,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,41,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,42,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,43,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,44,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,45,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,46,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,47,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,48,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,49,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,50,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,51,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,52,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,53,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,54,early_ack,late_ack,mask,unmask,level_mask)	;\
+	INTRSTUB32(name,55,early_ack,late_ack,mask,unmask,level_mask)
 
-#endif
+INTRSTUB_56(ioapic_edge,voidop,ioapic_asm_ack,voidop,voidop,voidop)
+INTRSTUB_56(ioapic_level,voidop,ioapic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
 
-	.type	_C_LABEL(legacy_stubs), @object
-LABEL(legacy_stubs)
-	.long _C_LABEL(Xintr_legacy0), _C_LABEL(Xrecurse_legacy0)
-	.long _C_LABEL(Xresume_legacy0)
-	.long _C_LABEL(Xintr_legacy1), _C_LABEL(Xrecurse_legacy1)
-	.long _C_LABEL(Xresume_legacy1)
-	.long _C_LABEL(Xintr_legacy2), _C_LABEL(Xrecurse_legacy2)
-	.long _C_LABEL(Xresume_legacy2)
-	.long _C_LABEL(Xintr_legacy3), _C_LABEL(Xrecurse_legacy3)
-	.long _C_LABEL(Xresume_legacy3)
-	.long _C_LABEL(Xintr_legacy4), _C_LABEL(Xrecurse_legacy4)
-	.long _C_LABEL(Xresume_legacy4)
-	.long _C_LABEL(Xintr_legacy5), _C_LABEL(Xrecurse_legacy5)
-	.long _C_LABEL(Xresume_legacy5)
-	.long _C_LABEL(Xintr_legacy6), _C_LABEL(Xrecurse_legacy6)
-	.long _C_LABEL(Xresume_legacy6)
-	.long _C_LABEL(Xintr_legacy7), _C_LABEL(Xrecurse_legacy7)
-	.long _C_LABEL(Xresume_legacy7)
-	.long _C_LABEL(Xintr_legacy8), _C_LABEL(Xrecurse_legacy8)
-	.long _C_LABEL(Xresume_legacy8)
-	.long _C_LABEL(Xintr_legacy9), _C_LABEL(Xrecurse_legacy9)
-	.long _C_LABEL(Xresume_legacy9)
-	.long _C_LABEL(Xintr_legacy10), _C_LABEL(Xrecurse_legacy10)
-	.long _C_LABEL(Xresume_legacy10)
-	.long _C_LABEL(Xintr_legacy11), _C_LABEL(Xrecurse_legacy11)
-	.long _C_LABEL(Xresume_legacy11)
-	.long _C_LABEL(Xintr_legacy12), _C_LABEL(Xrecurse_legacy12)
-	.long _C_LABEL(Xresume_legacy12)
-	.long _C_LABEL(Xintr_legacy13), _C_LABEL(Xrecurse_legacy13)
-	.long _C_LABEL(Xresume_legacy13)
-	.long _C_LABEL(Xintr_legacy14), _C_LABEL(Xrecurse_legacy14)
-	.long _C_LABEL(Xresume_legacy14)
-	.long _C_LABEL(Xintr_legacy15), _C_LABEL(Xrecurse_legacy15)
-	.long _C_LABEL(Xresume_legacy15)
-END(legacy_stubs)
+INTRSTUB_56(x2apic_edge,voidop,x2apic_asm_ack,voidop,voidop,voidop)
+INTRSTUB_56(x2apic_level,voidop,x2apic_asm_ack,voidop,ioapic_unmask,ioapic_mask)
 
-#if NIOAPIC > 0
-	.type	_C_LABEL(ioapic_edge_stubs), @object
-LABEL(ioapic_edge_stubs)
-	.long _C_LABEL(Xintr_ioapic_edge0), _C_LABEL(Xrecurse_ioapic_edge0)
-	.long _C_LABEL(Xresume_ioapic_edge0)
-	.long _C_LABEL(Xintr_ioapic_edge1), _C_LABEL(Xrecurse_ioapic_edge1)
-	.long _C_LABEL(Xresume_ioapic_edge1)
-	.long _C_LABEL(Xintr_ioapic_edge2), _C_LABEL(Xrecurse_ioapic_edge2)
-	.long _C_LABEL(Xresume_ioapic_edge2)
-	.long _C_LABEL(Xintr_ioapic_edge3), _C_LABEL(Xrecurse_ioapic_edge3)
-	.long _C_LABEL(Xresume_ioapic_edge3)
-	.long _C_LABEL(Xintr_ioapic_edge4), _C_LABEL(Xrecurse_ioapic_edge4)
-	.long _C_LABEL(Xresume_ioapic_edge4)
-	.long _C_LABEL(Xintr_ioapic_edge5), _C_LABEL(Xrecurse_ioapic_edge5)
-	.long _C_LABEL(Xresume_ioapic_edge5)
-	.long _C_LABEL(Xintr_ioapic_edge6), _C_LABEL(Xrecurse_ioapic_edge6)
-	.long _C_LABEL(Xresume_ioapic_edge6)
-	.long _C_LABEL(Xintr_ioapic_edge7), _C_LABEL(Xrecurse_ioapic_edge7)
-	.long _C_LABEL(Xresume_ioapic_edge7)
-	.long _C_LABEL(Xintr_ioapic_edge8), _C_LABEL(Xrecurse_ioapic_edge8)
-	.long _C_LABEL(Xresume_ioapic_edge8)
-	.long _C_LABEL(Xintr_ioapic_edge9), _C_LABEL(Xrecurse_ioapic_edge9)
-	.long _C_LABEL(Xresume_ioapic_edge9)
-	.long _C_LABEL(Xintr_ioapic_edge10), _C_LABEL(Xrecurse_ioapic_edge10)
-	.long _C_LABEL(Xresume_ioapic_edge10)
-	.long _C_LABEL(Xintr_ioapic_edge11), _C_LABEL(Xrecurse_ioapic_edge11)
-	.long _C_LABEL(Xresume_ioapic_edge11)
-	.long _C_LABEL(Xintr_ioapic_edge12), _C_LABEL(Xrecurse_ioapic_edge12)
-	.long _C_LABEL(Xresume_ioapic_edge12)
-	.long _C_LABEL(Xintr_ioapic_edge13), _C_LABEL(Xrecurse_ioapic_edge13)
-	.long _C_LABEL(Xresume_ioapic_edge13)
-	.long _C_LABEL(Xintr_ioapic_edge14), _C_LABEL(Xrecurse_ioapic_edge14)
-	.long _C_LABEL(Xresume_ioapic_edge14)
-	.long _C_LABEL(Xintr_ioapic_edge15), _C_LABEL(Xrecurse_ioapic_edge15)
-	.long _C_LABEL(Xresume_ioapic_edge15)
-	.long _C_LABEL(Xintr_ioapic_edge16), _C_LABEL(Xrecurse_ioapic_edge16)
-	.long _C_LABEL(Xresume_ioapic_edge16)
-	.long _C_LABEL(Xintr_ioapic_edge17), _C_LABEL(Xrecurse_ioapic_edge17)
-	.long _C_LABEL(Xresume_ioapic_edge17)
-	.long _C_LABEL(Xintr_ioapic_edge18), _C_LABEL(Xrecurse_ioapic_edge18)
-	.long _C_LABEL(Xresume_ioapic_edge18)
-	.long _C_LABEL(Xintr_ioapic_edge19), _C_LABEL(Xrecurse_ioapic_edge19)
-	.long _C_LABEL(Xresume_ioapic_edge19)
-	.long _C_LABEL(Xintr_ioapic_edge20), _C_LABEL(Xrecurse_ioapic_edge20)
-	.long _C_LABEL(Xresume_ioapic_edge20)
-	.long _C_LABEL(Xintr_ioapic_edge21), _C_LABEL(Xrecurse_ioapic_edge21)
-	.long _C_LABEL(Xresume_ioapic_edge21)
-	.long _C_LABEL(Xintr_ioapic_edge22), _C_LABEL(Xrecurse_ioapic_edge22)
-	.long _C_LABEL(Xresume_ioapic_edge22)
-	.long _C_LABEL(Xintr_ioapic_edge23), _C_LABEL(Xrecurse_ioapic_edge23)
-	.long _C_LABEL(Xresume_ioapic_edge23)
-	.long _C_LABEL(Xintr_ioapic_edge24), _C_LABEL(Xrecurse_ioapic_edge24)
-	.long _C_LABEL(Xresume_ioapic_edge24)
-	.long _C_LABEL(Xintr_ioapic_edge25), _C_LABEL(Xrecurse_ioapic_edge25)
-	.long _C_LABEL(Xresume_ioapic_edge25)
-	.long _C_LABEL(Xintr_ioapic_edge26), _C_LABEL(Xrecurse_ioapic_edge26)
-	.long _C_LABEL(Xresume_ioapic_edge26)
-	.long _C_LABEL(Xintr_ioapic_edge27), _C_LABEL(Xrecurse_ioapic_edge27)
-	.long _C_LABEL(Xresume_ioapic_edge27)
-	.long _C_LABEL(Xintr_ioapic_edge28), _C_LABEL(Xrecurse_ioapic_edge28)
-	.long _C_LABEL(Xresume_ioapic_edge28)
-	.long _C_LABEL(Xintr_ioapic_edge29), _C_LABEL(Xrecurse_ioapic_edge29)
-	.long _C_LABEL(Xresume_ioapic_edge29)
-	.long _C_LABEL(Xintr_ioapic_edge30), _C_LABEL(Xrecurse_ioapic_edge30)
-	.long _C_LABEL(Xresume_ioapic_edge30)
-	.long _C_LABEL(Xintr_ioapic_edge31), _C_LABEL(Xrecurse_ioapic_edge31)
-	.long _C_LABEL(Xresume_ioapic_edge31)
-END(ioapic_edge_stubs)
-
-	.type	_C_LABEL(ioapic_level_stubs), @object
-LABEL(ioapic_level_stubs)
-	.long _C_LABEL(Xintr_ioapic_level0), _C_LABEL(Xrecurse_ioapic_level0)
-	.long _C_LABEL(Xresume_ioapic_level0)
-	.long _C_LABEL(Xintr_ioapic_level1), _C_LABEL(Xrecurse_ioapic_level1)
-	.long _C_LABEL(Xresume_ioapic_level1)
-	.long _C_LABEL(Xintr_ioapic_level2), _C_LABEL(Xrecurse_ioapic_level2)
-	.long _C_LABEL(Xresume_ioapic_level2)
-	.long _C_LABEL(Xintr_ioapic_level3), _C_LABEL(Xrecurse_ioapic_level3)
-	.long _C_LABEL(Xresume_ioapic_level3)
-	.long _C_LABEL(Xintr_ioapic_level4), _C_LABEL(Xrecurse_ioapic_level4)
-	.long _C_LABEL(Xresume_ioapic_level4)
-	.long _C_LABEL(Xintr_ioapic_level5), _C_LABEL(Xrecurse_ioapic_level5)
-	.long _C_LABEL(Xresume_ioapic_level5)
-	.long _C_LABEL(Xintr_ioapic_level6), _C_LABEL(Xrecurse_ioapic_level6)
-	.long _C_LABEL(Xresume_ioapic_level6)
-	.long _C_LABEL(Xintr_ioapic_level7), _C_LABEL(Xrecurse_ioapic_level7)
-	.long _C_LABEL(Xresume_ioapic_level7)
-	.long _C_LABEL(Xintr_ioapic_level8), _C_LABEL(Xrecurse_ioapic_level8)
-	.long _C_LABEL(Xresume_ioapic_level8)
-	.long _C_LABEL(Xintr_ioapic_level9), _C_LABEL(Xrecurse_ioapic_level9)
-	.long _C_LABEL(Xresume_ioapic_level9)
-	.long _C_LABEL(Xintr_ioapic_level10), _C_LABEL(Xrecurse_ioapic_level10)
-	.long _C_LABEL(Xresume_ioapic_level10)
-	.long _C_LABEL(Xintr_ioapic_level11), _C_LABEL(Xrecurse_ioapic_level11)
-	.long _C_LABEL(Xresume_ioapic_level11)
-	.long _C_LABEL(Xintr_ioapic_level12), _C_LABEL(Xrecurse_ioapic_level12)
-	.long _C_LABEL(Xresume_ioapic_level12)
-	.long _C_LABEL(Xintr_ioapic_level13), _C_LABEL(Xrecurse_ioapic_level13)
-	.long _C_LABEL(Xresume_ioapic_level13)
-	.long _C_LABEL(Xintr_ioapic_level14), _C_LABEL(Xrecurse_ioapic_level14)
-	.long _C_LABEL(Xresume_ioapic_level14)
-	.long _C_LABEL(Xintr_ioapic_level15), _C_LABEL(Xrecurse_ioapic_level15)
-	.long _C_LABEL(Xresume_ioapic_level15)
-	.long _C_LABEL(Xintr_ioapic_level16), _C_LABEL(Xrecurse_ioapic_level16)
-	.long _C_LABEL(Xresume_ioapic_level16)
-	.long _C_LABEL(Xintr_ioapic_level17), _C_LABEL(Xrecurse_ioapic_level17)
-	.long _C_LABEL(Xresume_ioapic_level17)
-	.long _C_LABEL(Xintr_ioapic_level18), _C_LABEL(Xrecurse_ioapic_level18)
-	.long _C_LABEL(Xresume_ioapic_level18)
-	.long _C_LABEL(Xintr_ioapic_level19), _C_LABEL(Xrecurse_ioapic_level19)
-	.long _C_LABEL(Xresume_ioapic_level19)
-	.long _C_LABEL(Xintr_ioapic_level20), _C_LABEL(Xrecurse_ioapic_level20)
-	.long _C_LABEL(Xresume_ioapic_level20)
-	.long _C_LABEL(Xintr_ioapic_level21), _C_LABEL(Xrecurse_ioapic_level21)
-	.long _C_LABEL(Xresume_ioapic_level21)
-	.long _C_LABEL(Xintr_ioapic_level22), _C_LABEL(Xrecurse_ioapic_level22)
-	.long _C_LABEL(Xresume_ioapic_level22)
-	.long _C_LABEL(Xintr_ioapic_level23), _C_LABEL(Xrecurse_ioapic_level23)
-	.long _C_LABEL(Xresume_ioapic_level23)
-	.long _C_LABEL(Xintr_ioapic_level24), _C_LABEL(Xrecurse_ioapic_level24)
-	.long _C_LABEL(Xresume_ioapic_level24)
-	.long _C_LABEL(Xintr_ioapic_level25), _C_LABEL(Xrecurse_ioapic_level25)
-	.long _C_LABEL(Xresume_ioapic_level25)
-	.long _C_LABEL(Xintr_ioapic_level26), _C_LABEL(Xrecurse_ioapic_level26)
-	.long _C_LABEL(Xresume_ioapic_level26)
-	.long _C_LABEL(Xintr_ioapic_level27), _C_LABEL(Xrecurse_ioapic_level27)
-	.long _C_LABEL(Xresume_ioapic_level27)
-	.long _C_LABEL(Xintr_ioapic_level28), _C_LABEL(Xrecurse_ioapic_level28)
-	.long _C_LABEL(Xresume_ioapic_level28)
-	.long _C_LABEL(Xintr_ioapic_level29), _C_LABEL(Xrecurse_ioapic_level29)
-	.long _C_LABEL(Xresume_ioapic_level29)
-	.long _C_LABEL(Xintr_ioapic_level30), _C_LABEL(Xrecurse_ioapic_level30)
-	.long _C_LABEL(Xresume_ioapic_level30)
-	.long _C_LABEL(Xintr_ioapic_level31), _C_LABEL(Xrecurse_ioapic_level31)
-	.long _C_LABEL(Xresume_ioapic_level31)
-END(ioapic_level_stubs)
-
-	.type	_C_LABEL(x2apic_edge_stubs), @object
-LABEL(x2apic_edge_stubs)
-	.long _C_LABEL(Xintr_x2apic_edge0), _C_LABEL(Xrecurse_x2apic_edge0)
-	.long _C_LABEL(Xresume_x2apic_edge0)
-	.long _C_LABEL(Xintr_x2apic_edge1), _C_LABEL(Xrecurse_x2apic_edge1)
-	.long _C_LABEL(Xresume_x2apic_edge1)
-	.long _C_LABEL(Xintr_x2apic_edge2), _C_LABEL(Xrecurse_x2apic_edge2)
-	.long _C_LABEL(Xresume_x2apic_edge2)
-	.long _C_LABEL(Xintr_x2apic_edge3), _C_LABEL(Xrecurse_x2apic_edge3)
-	.long _C_LABEL(Xresume_x2apic_edge3)
-	.long _C_LABEL(Xintr_x2apic_edge4), _C_LABEL(Xrecurse_x2apic_edge4)
-	.long _C_LABEL(Xresume_x2apic_edge4)
-	.long _C_LABEL(Xintr_x2apic_edge5), _C_LABEL(Xrecurse_x2apic_edge5)
-	.long _C_LABEL(Xresume_x2apic_edge5)
-	.long _C_LABEL(Xintr_x2apic_edge6), _C_LABEL(Xrecurse_x2apic_edge6)
-	.long _C_LABEL(Xresume_x2apic_edge6)
-	.long _C_LABEL(Xintr_x2apic_edge7), _C_LABEL(Xrecurse_x2apic_edge7)
-	.long _C_LABEL(Xresume_x2apic_edge7)
-	.long _C_LABEL(Xintr_x2apic_edge8), _C_LABEL(Xrecurse_x2apic_edge8)
-	.long _C_LABEL(Xresume_x2apic_edge8)
-	.long _C_LABEL(Xintr_x2apic_edge9), _C_LABEL(Xrecurse_x2apic_edge9)
-	.long _C_LABEL(Xresume_x2apic_edge9)
-	.long _C_LABEL(Xintr_x2apic_edge10), _C_LABEL(Xrecurse_x2apic_edge10)
-	.long _C_LABEL(Xresume_x2apic_edge10)
-	.long _C_LABEL(Xintr_x2apic_edge11), _C_LABEL(Xrecurse_x2apic_edge11)
-	.long _C_LABEL(Xresume_x2apic_edge11)
-	.long _C_LABEL(Xintr_x2apic_edge12), _C_LABEL(Xrecurse_x2apic_edge12)
-	.long _C_LABEL(Xresume_x2apic_edge12)
-	.long _C_LABEL(Xintr_x2apic_edge13), _C_LABEL(Xrecurse_x2apic_edge13)
-	.long _C_LABEL(Xresume_x2apic_edge13)
-	.long _C_LABEL(Xintr_x2apic_edge14), _C_LABEL(Xrecurse_x2apic_edge14)
-	.long _C_LABEL(Xresume_x2apic_edge14)
-	.long _C_LABEL(Xintr_x2apic_edge15), _C_LABEL(Xrecurse_x2apic_edge15)
-	.long _C_LABEL(Xresume_x2apic_edge15)
-	.long _C_LABEL(Xintr_x2apic_edge16), _C_LABEL(Xrecurse_x2apic_edge16)
-	.long _C_LABEL(Xresume_x2apic_edge16)
-	.long _C_LABEL(Xintr_x2apic_edge17), _C_LABEL(Xrecurse_x2apic_edge17)
-	.long _C_LABEL(Xresume_x2apic_edge17)
-	.long _C_LABEL(Xintr_x2apic_edge18), _C_LABEL(Xrecurse_x2apic_edge18)
-	.long _C_LABEL(Xresume_x2apic_edge18)
-	.long _C_LABEL(Xintr_x2apic_edge19), _C_LABEL(Xrecurse_x2apic_edge19)
-	.long _C_LABEL(Xresume_x2apic_edge19)
-	.long _C_LABEL(Xintr_x2apic_edge20), _C_LABEL(Xrecurse_x2apic_edge20)
-	.long _C_LABEL(Xresume_x2apic_edge20)
-	.long _C_LABEL(Xintr_x2apic_edge21), _C_LABEL(Xrecurse_x2apic_edge21)
-	.long _C_LABEL(Xresume_x2apic_edge21)
-	.long _C_LABEL(Xintr_x2apic_edge22), _C_LABEL(Xrecurse_x2apic_edge22)
-	.long _C_LABEL(Xresume_x2apic_edge22)
-	.long _C_LABEL(Xintr_x2apic_edge23), _C_LABEL(Xrecurse_x2apic_edge23)
-	.long _C_LABEL(Xresume_x2apic_edge23)
-	.long _C_LABEL(Xintr_x2apic_edge24), _C_LABEL(Xrecurse_x2apic_edge24)
-	.long _C_LABEL(Xresume_x2apic_edge24)
-	.long _C_LABEL(Xintr_x2apic_edge25), _C_LABEL(Xrecurse_x2apic_edge25)
-	.long _C_LABEL(Xresume_x2apic_edge25)
-	.long _C_LABEL(Xintr_x2apic_edge26), _C_LABEL(Xrecurse_x2apic_edge26)
-	.long _C_LABEL(Xresume_x2apic_edge26)
-	.long _C_LABEL(Xintr_x2apic_edge27), _C_LABEL(Xrecurse_x2apic_edge27)
-	.long _C_LABEL(Xresume_x2apic_edge27)
-	.long _C_LABEL(Xintr_x2apic_edge28), _C_LABEL(Xrecurse_x2apic_edge28)
-	.long _C_LABEL(Xresume_x2apic_edge28)
-	.long _C_LABEL(Xintr_x2apic_edge29), _C_LABEL(Xrecurse_x2apic_edge29)
-	.long _C_LABEL(Xresume_x2apic_edge29)
-	.long _C_LABEL(Xintr_x2apic_edge30), _C_LABEL(Xrecurse_x2apic_edge30)
-	.long _C_LABEL(Xresume_x2apic_edge30)
-	.long _C_LABEL(Xintr_x2apic_edge31), _C_LABEL(Xrecurse_x2apic_edge31)
-	.long _C_LABEL(Xresume_x2apic_edge31)
-END(x2apic_edge_stubs)
-
-	.type	_C_LABEL(x2apic_level_stubs), @object
-LABEL(x2apic_level_stubs)
-	.long _C_LABEL(Xintr_x2apic_level0), _C_LABEL(Xrecurse_x2apic_level0)
-	.long _C_LABEL(Xresume_x2apic_level0)
-	.long _C_LABEL(Xintr_x2apic_level1), _C_LABEL(Xrecurse_x2apic_level1)
-	.long _C_LABEL(Xresume_x2apic_level1)
-	.long _C_LABEL(Xintr_x2apic_level2), _C_LABEL(Xrecurse_x2apic_level2)
-	.long _C_LABEL(Xresume_x2apic_level2)
-	.long _C_LABEL(Xintr_x2apic_level3), _C_LABEL(Xrecurse_x2apic_level3)
-	.long _C_LABEL(Xresume_x2apic_level3)
-	.long _C_LABEL(Xintr_x2apic_level4), _C_LABEL(Xrecurse_x2apic_level4)
-	.long _C_LABEL(Xresume_x2apic_level4)
-	.long _C_LABEL(Xintr_x2apic_level5), _C_LABEL(Xrecurse_x2apic_level5)
-	.long _C_LABEL(Xresume_x2apic_level5)
-	.long _C_LABEL(Xintr_x2apic_level6), _C_LABEL(Xrecurse_x2apic_level6)
-	.long _C_LABEL(Xresume_x2apic_level6)
-	.long _C_LABEL(Xintr_x2apic_level7), _C_LABEL(Xrecurse_x2apic_level7)
-	.long _C_LABEL(Xresume_x2apic_level7)
-	.long _C_LABEL(Xintr_x2apic_level8), _C_LABEL(Xrecurse_x2apic_level8)
-	.long _C_LABEL(Xresume_x2apic_level8)
-	.long _C_LABEL(Xintr_x2apic_level9), _C_LABEL(Xrecurse_x2apic_level9)
-	.long _C_LABEL(Xresume_x2apic_level9)
-	.long _C_LABEL(Xintr_x2apic_level10), _C_LABEL(Xrecurse_x2apic_level10)
-	.long _C_LABEL(Xresume_x2apic_level10)
-	.long _C_LABEL(Xintr_x2apic_level11), _C_LABEL(Xrecurse_x2apic_level11)
-	.long _C_LABEL(Xresume_x2apic_level11)
-	.long _C_LABEL(Xintr_x2apic_level12), _C_LABEL(Xrecurse_x2apic_level12)
-	.long _C_LABEL(Xresume_x2apic_level12)
-	.long _C_LABEL(Xintr_x2apic_level13), _C_LABEL(Xrecurse_x2apic_level13)
-	.long _C_LABEL(Xresume_x2apic_level13)
-	.long _C_LABEL(Xintr_x2apic_level14), _C_LABEL(Xrecurse_x2apic_level14)
-	.long _C_LABEL(Xresume_x2apic_level14)
-	.long _C_LABEL(Xintr_x2apic_level15), _C_LABEL(Xrecurse_x2apic_level15)
-	.long _C_LABEL(Xresume_x2apic_level15)
-	.long _C_LABEL(Xintr_x2apic_level16), _C_LABEL(Xrecurse_x2apic_level16)
-	.long _C_LABEL(Xresume_x2apic_level16)
-	.long _C_LABEL(Xintr_x2apic_level17), _C_LABEL(Xrecurse_x2apic_level17)
-	.long _C_LABEL(Xresume_x2apic_level17)
-	.long _C_LABEL(Xintr_x2apic_level18), _C_LABEL(Xrecurse_x2apic_level18)
-	.long _C_LABEL(Xresume_x2apic_level18)
-	.long _C_LABEL(Xintr_x2apic_level19), _C_LABEL(Xrecurse_x2apic_level19)
-	.long _C_LABEL(Xresume_x2apic_level19)
-	.long _C_LABEL(Xintr_x2apic_level20), _C_LABEL(Xrecurse_x2apic_level20)
-	.long _C_LABEL(Xresume_x2apic_level20)
-	.long _C_LABEL(Xintr_x2apic_level21), _C_LABEL(Xrecurse_x2apic_level21)
-	.long _C_LABEL(Xresume_x2apic_level21)
-	.long _C_LABEL(Xintr_x2apic_level22), _C_LABEL(Xrecurse_x2apic_level22)
-	.long _C_LABEL(Xresume_x2apic_level22)
-	.long _C_LABEL(Xintr_x2apic_level23), _C_LABEL(Xrecurse_x2apic_level23)
-	.long _C_LABEL(Xresume_x2apic_level23)
-	.long _C_LABEL(Xintr_x2apic_level24), _C_LABEL(Xrecurse_x2apic_level24)
-	.long _C_LABEL(Xresume_x2apic_level24)
-	.long _C_LABEL(Xintr_x2apic_level25), _C_LABEL(Xrecurse_x2apic_level25)
-	.long _C_LABEL(Xresume_x2apic_level25)
-	.long _C_LABEL(Xintr_x2apic_level26), _C_LABEL(Xrecurse_x2apic_level26)
-	.long _C_LABEL(Xresume_x2apic_level26)
-	.long _C_LABEL(Xintr_x2apic_level27), _C_LABEL(Xrecurse_x2apic_level27)
-	.long _C_LABEL(Xresume_x2apic_level27)
-	.long _C_LABEL(Xintr_x2apic_level28), _C_LABEL(Xrecurse_x2apic_level28)
-	.long _C_LABEL(Xresume_x2apic_level28)
-	.long _C_LABEL(Xintr_x2apic_level29), _C_LABEL(Xrecurse_x2apic_level29)
-	.long _C_LABEL(Xresume_x2apic_level29)
-	.long _C_LABEL(Xintr_x2apic_level30), _C_LABEL(Xrecurse_x2apic_level30)
-	.long _C_LABEL(Xresume_x2apic_level30)
-	.long _C_LABEL(Xintr_x2apic_level31), _C_LABEL(Xrecurse_x2apic_level31)
-	.long _C_LABEL(Xresume_x2apic_level31)
-END(x2apic_level_stubs)
 #endif
+
+/*
+ * Create a struct intrstub.
+ */
+#define INTRSTUB_ENTRY(name) \
+	.long _C_LABEL(Xintr_ ## name ), _C_LABEL(Xrecurse_ ## name ) ; \
+	.long _C_LABEL(Xresume_ ## name ) ;
+
+/*
+ * Create an array of structs intrstub (16 entries).
+ */
+#define INTRSTUB_ARRAY_16(name) 		; \
+	.type _C_LABEL(name ## _stubs), @object	; \
+LABEL(name ## _stubs)				; \
+	INTRSTUB_ENTRY(name ## 0)		; \
+	INTRSTUB_ENTRY(name ## 1)		; \
+	INTRSTUB_ENTRY(name ## 2)		; \
+	INTRSTUB_ENTRY(name ## 3)		; \
+	INTRSTUB_ENTRY(name ## 4)		; \
+	INTRSTUB_ENTRY(name ## 5)		; \
+	INTRSTUB_ENTRY(name ## 6)		; \
+	INTRSTUB_ENTRY(name ## 7)		; \
+	INTRSTUB_ENTRY(name ## 8)		; \
+	INTRSTUB_ENTRY(name ## 9)		; \
+	INTRSTUB_ENTRY(name ## 10)		; \
+	INTRSTUB_ENTRY(name ## 11)		; \
+	INTRSTUB_ENTRY(name ## 12)		; \
+	INTRSTUB_ENTRY(name ## 13)		; \
+	INTRSTUB_ENTRY(name ## 14)		; \
+	INTRSTUB_ENTRY(name ## 15)		; \
+END(name ## _stubs)
+
+/*
+ * Create an array of structs intrstub (56 entries).
+ */
+#define INTRSTUB_ARRAY_56(name) 		; \
+	.type _C_LABEL(name ## _stubs), @object	; \
+LABEL(name ## _stubs)				; \
+	INTRSTUB_ENTRY(name ## 0)		; \
+	INTRSTUB_ENTRY(name ## 1)		; \
+	INTRSTUB_ENTRY(name ## 2)		; \
+	INTRSTUB_ENTRY(name ## 3)		; \
+	INTRSTUB_ENTRY(name ## 4)		; \
+	INTRSTUB_ENTRY(name ## 5)		; \
+	INTRSTUB_ENTRY(name ## 6)		; \
+	INTRSTUB_ENTRY(name ## 7)		; \
+	INTRSTUB_ENTRY(name ## 8)		; \
+	INTRSTUB_ENTRY(name ## 9)		; \
+	INTRSTUB_ENTRY(name ## 10)		; \
+	INTRSTUB_ENTRY(name ## 11)		; \
+	INTRSTUB_ENTRY(name ## 12)		; \
+	INTRSTUB_ENTRY(name ## 13)		; \
+	INTRSTUB_ENTRY(name ## 14)		; \
+	INTRSTUB_ENTRY(name ## 15)		; \
+	INTRSTUB_ENTRY(name ## 16)		; \
+	INTRSTUB_ENTRY(name ## 17)		; \
+	INTRSTUB_ENTRY(name ## 18)		; \
+	INTRSTUB_ENTRY(name ## 19)		; \
+	INTRSTUB_ENTRY(name ## 20)		; \
+	INTRSTUB_ENTRY(name ## 21)		; \
+	INTRSTUB_ENTRY(name ## 22)		; \
+	INTRSTUB_ENTRY(name ## 23)		; \
+	INTRSTUB_ENTRY(name ## 24)		; \
+	INTRSTUB_ENTRY(name ## 25)		; \
+	INTRSTUB_ENTRY(name ## 26)		; \
+	INTRSTUB_ENTRY(name ## 27)		; \
+	INTRSTUB_ENTRY(name ## 28)		; \
+	INTRSTUB_ENTRY(name ## 29)		; \
+	INTRSTUB_ENTRY(name ## 30)		; \
+	INTRSTUB_ENTRY(name ## 31)		; \
+	INTRSTUB_ENTRY(name ## 32)		; \
+	INTRSTUB_ENTRY(name ## 33)		; \
+	INTRSTUB_ENTRY(name ## 34)		; \
+	INTRSTUB_ENTRY(name ## 35)		; \
+	INTRSTUB_ENTRY(name ## 36)		; \
+	INTRSTUB_ENTRY(name ## 37)		; \
+	INTRSTUB_ENTRY(name ## 38)		; \
+	INTRSTUB_ENTRY(name ## 39)		; \
+	INTRSTUB_ENTRY(name ## 40)		; \
+	INTRSTUB_ENTRY(name ## 41)		; \
+	INTRSTUB_ENTRY(name ## 42)		; \
+	INTRSTUB_ENTRY(name ## 43)		; \
+	INTRSTUB_ENTRY(name ## 44)		; \
+	INTRSTUB_ENTRY(name ## 45)		; \
+	INTRSTUB_ENTRY(name ## 46)		; \
+	INTRSTUB_ENTRY(name ## 47)		; \
+	INTRSTUB_ENTRY(name ## 48)		; \
+	INTRSTUB_ENTRY(name ## 49)		; \
+	INTRSTUB_ENTRY(name ## 50)		; \
+	INTRSTUB_ENTRY(name ## 51)		; \
+	INTRSTUB_ENTRY(name ## 52)		; \
+	INTRSTUB_ENTRY(name ## 53)		; \
+	INTRSTUB_ENTRY(name ## 54)		; \
+	INTRSTUB_ENTRY(name ## 55)		; \
+END(name ## _stubs)
+
 #endif /* XENPV */
 
 #if defined(XEN)
@@ -950,7 +673,7 @@ IDTVEC(resume_ ## name ## sir)						\
 	movl	$IREENT_MAGIC,TF_ERR(%esp)				;\
 	pushl	%ebx							;\
 	movl	CPUVAR(ISOURCES) + (sir) * 4,%ebp			;\
-	movl	$level,CPUVAR(ILEVEL)					;\
+	movb	$level,CPUVAR(ILEVEL)					;\
 	IDEPTH_INCR /* leaves old %esp on stack	*/			;\
 	STI(%eax)							;\
 	movl	IS_HANDLERS(%ebp),%ebx					;\
@@ -1016,7 +739,8 @@ IDTVEC(hypervisor_pvhvm_callback)	
 	pushl	$0			/* dummy error code */
 	pushl	$T_ASTFLT
 	INTRENTRY
-	pushl	CPUVAR(ILEVEL)
+	movzbl	CPUVAR(ILEVEL),%eax
+	pushl	%eax
 	IDEPTH_INCR
 	/* IDEPTH_INCR puts %esp on stack; we use it as argument to
 	 * do_hypervisor_callback. But don't restore the stack after,
@@ -1049,4 +773,18 @@ ENTRY(failsafe_callback)
 	iret
 END(failsafe_callback)
 
+#else	/* XENPV */
+
+	.section .rodata
+
+INTRSTUB_ARRAY_16(legacy)
+
+#if NIOAPIC > 0
+INTRSTUB_ARRAY_56(ioapic_edge)
+INTRSTUB_ARRAY_56(ioapic_level)
+
+INTRSTUB_ARRAY_56(x2apic_edge)
+INTRSTUB_ARRAY_56(x2apic_level)
+#endif
+
 #endif /* XENPV */
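
The vector.S rework above replaces dozens of hand-unrolled INTRSTUB invocations and .long stub tables with cpp macros that stamp out one stub, and one three-pointer table row, per interrupt slot via token pasting. A rough stand-alone C sketch of the same table-generation pattern, using placeholder functions rather than the kernel's Xintr_*/Xrecurse_*/Xresume_* entry points:

#include <stdio.h>

/* Placeholder stubs; in the kernel these are assembly entry points. */
static void demo0_intr(void)    { puts("intr 0"); }
static void demo0_recurse(void) { puts("recurse 0"); }
static void demo0_resume(void)  { puts("resume 0"); }
static void demo1_intr(void)    { puts("intr 1"); }
static void demo1_recurse(void) { puts("recurse 1"); }
static void demo1_resume(void)  { puts("resume 1"); }

/* One row per interrupt slot, mirroring the three .long entries per stub. */
struct demo_intrstub {
	void (*ist_entry)(void);
	void (*ist_recurse)(void);
	void (*ist_resume)(void);
};

/* Token pasting (##) builds each row from a base name and a slot number. */
#define DEMO_STUB_ENTRY(name, num) \
	{ name ## num ## _intr, name ## num ## _recurse, name ## num ## _resume }

static const struct demo_intrstub demo_stubs[] = {
	DEMO_STUB_ENTRY(demo, 0),
	DEMO_STUB_ENTRY(demo, 1),
};

int
main(void)
{
	for (size_t i = 0; i < sizeof(demo_stubs) / sizeof(demo_stubs[0]); i++)
		demo_stubs[i].ist_entry();
	return 0;
}

Generating both the 56 stubs and the 56-entry pointer tables from the same numbered expansions is what keeps the INTRSTUB_56() and INTRSTUB_ARRAY_56() lists in sync.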

Index: src/sys/arch/x86/include/cpu.h
diff -u src/sys/arch/x86/include/cpu.h:1.132 src/sys/arch/x86/include/cpu.h:1.133
--- src/sys/arch/x86/include/cpu.h:1.132	Thu Oct  7 13:04:18 2021
+++ src/sys/arch/x86/include/cpu.h	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: cpu.h,v 1.132 2021/10/07 13:04:18 msaitoh Exp $	*/
+/*	$NetBSD: cpu.h,v 1.133 2022/09/07 00:40:18 knakahara Exp $	*/
 
 /*
  * Copyright (c) 1990 The Regents of the University of California.
@@ -139,18 +139,19 @@ struct cpu_info {
 	volatile int	ci_mtx_oldspl;	/* Old SPL at this ci_idepth */
 
 	/* The following must be aligned for cmpxchg8b. */
-	struct {
-		uint32_t	ipending;
-		int		ilevel;
-		uint32_t	imasked;
-	} ci_istate __aligned(8);
-#define ci_ipending	ci_istate.ipending
-#define	ci_ilevel	ci_istate.ilevel
-#define	ci_imasked	ci_istate.imasked
+	union {
+		uint64_t	ci_istate;
+		struct {
+			uint64_t	ci_ipending:56;
+			uint64_t	ci_ilevel:8;
+		};
+	} __aligned(8);
+	uint64_t	ci_imasked;
+
 	int		ci_idepth;
 	void *		ci_intrstack;
-	uint32_t	ci_imask[NIPL];
-	uint32_t	ci_iunmask[NIPL];
+	uint64_t	ci_imask[NIPL];
+	uint64_t	ci_iunmask[NIPL];
 
 	uint32_t	ci_signature;	/* X86 cpuid type (cpuid.1.%eax) */
 	uint32_t	ci_vendor[4];	/* vendor string */
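
The cpu.h change packs the per-CPU pending bitmap (now 56 bits) and the 8-bit interrupt level into one 64-bit word, so both can be examined or updated together with a single atomic operation; the comment above the field already requires it to stay aligned for cmpxchg8b. A rough user-space sketch of that idea, with an assumed bit placement chosen purely for illustration and made-up helper names (the kernel's actual layout and accessors are defined by cpu.h, genassym and the md assembly, not by this sketch):

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

/* Assumed packing for illustration: bits 0-7 = level, bits 8-63 = pending. */
#define ISTATE_LEVEL(st)	((unsigned)((st) & 0xff))
#define ISTATE_PENDING(st)	((st) >> 8)
#define ISTATE_MAKE(pend, lvl)	(((uint64_t)(pend) << 8) | ((lvl) & 0xff))

static _Atomic uint64_t istate;

/* Mark one interrupt source pending with a single 64-bit atomic OR. */
static void
set_pending(unsigned slot)
{
	atomic_fetch_or(&istate, (uint64_t)1 << (slot + 8));
}

/* Atomically grab all pending bits and switch to a new level in one CAS. */
static uint64_t
claim_pending(unsigned newlevel)
{
	uint64_t cur = atomic_load(&istate), next;

	do {
		next = ISTATE_MAKE(0, newlevel);
	} while (!atomic_compare_exchange_weak(&istate, &cur, next));
	return ISTATE_PENDING(cur);
}

int
main(void)
{
	set_pending(3);
	set_pending(55);	/* highest slot with MAX_INTR_SOURCES 56 */
	printf("claimed %#llx\n", (unsigned long long)claim_pending(7));
	printf("level now %u\n", ISTATE_LEVEL(atomic_load(&istate)));
	return 0;
}

The kernel keeps the two fields as bit-fields of a union with ci_istate, so C code can still refer to ci_ipending and ci_ilevel by name while atomic paths (such as the atomic_or_64() in the xenevt.c hunk below) operate on the whole word.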

Index: src/sys/arch/x86/include/intr.h
diff -u src/sys/arch/x86/include/intr.h:1.65 src/sys/arch/x86/include/intr.h:1.66
--- src/sys/arch/x86/include/intr.h:1.65	Tue May 24 15:55:19 2022
+++ src/sys/arch/x86/include/intr.h	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: intr.h,v 1.65 2022/05/24 15:55:19 bouyer Exp $	*/
+/*	$NetBSD: intr.h,v 1.66 2022/09/07 00:40:18 knakahara Exp $	*/
 
 /*-
  * Copyright (c) 1998, 2001, 2006, 2007, 2008, 2019 The NetBSD Foundation, Inc.
@@ -149,9 +149,6 @@ struct intrhand {
 	char	ih_xname[INTRDEVNAMEBUF];
 };
 
-#define IMASK(ci,level) (ci)->ci_imask[(level)]
-#define IUNMASK(ci,level) (ci)->ci_iunmask[(level)]
-
 #ifdef _KERNEL
 
 void Xspllower(int);

Index: src/sys/arch/x86/include/intrdefs.h
diff -u src/sys/arch/x86/include/intrdefs.h:1.25 src/sys/arch/x86/include/intrdefs.h:1.26
--- src/sys/arch/x86/include/intrdefs.h:1.25	Thu Mar 18 01:50:12 2021
+++ src/sys/arch/x86/include/intrdefs.h	Wed Sep  7 00:40:18 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: intrdefs.h,v 1.25 2021/03/18 01:50:12 nonaka Exp $	*/
+/*	$NetBSD: intrdefs.h,v 1.26 2022/09/07 00:40:18 knakahara Exp $	*/
 
 #ifndef _X86_INTRDEFS_H_
 #define _X86_INTRDEFS_H_
@@ -25,8 +25,8 @@
  * Local APIC masks and software interrupt masks, in order
  * of priority.  Must not conflict with SIR_* below.
  */
-#define LIR_IPI		31
-#define LIR_TIMER	30
+#define LIR_IPI		55
+#define LIR_TIMER	54
 
 /*
  * XXX These should be lowest numbered, but right now would
@@ -47,12 +47,12 @@
 #define XEN_IPL2SIR(ipl) ((ipl) + (SIR_XENIPL_VM - IPL_VM))
 
 /*
- * Maximum # of interrupt sources per CPU. 32 to fit in one word.
+ * Maximum # of interrupt sources per CPU. Bitmask must still fit in one quad.
  * ioapics can theoretically produce more, but it's not likely to
  * happen. For multiple ioapics, things can be routed to different
  * CPUs.
  */
-#define MAX_INTR_SOURCES	32
+#define MAX_INTR_SOURCES	56
 #define NUM_LEGACY_IRQS		16
 
 /*
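
With LIR_IPI and LIR_TIMER moved to slots 55 and 54, the local-APIC sources occupy the top of the enlarged range, and the 56 pending bits plus the 8-bit interrupt level they are packed with still fit in one 64-bit word. A tiny compile-time sanity sketch of that budget, using only the constants shown above (the SIR_* values are not reproduced here):

#include <assert.h>

#define MAX_INTR_SOURCES	56
#define LIR_IPI			55
#define LIR_TIMER		54

/* 56 pending bits plus an 8-bit level share a single 64-bit istate word. */
static_assert(MAX_INTR_SOURCES + 8 <= 64, "istate word would overflow");
static_assert(LIR_IPI == MAX_INTR_SOURCES - 1, "IPI takes the highest slot");
static_assert(LIR_TIMER == MAX_INTR_SOURCES - 2, "timer sits just below it");

int main(void) { return 0; }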

Index: src/sys/arch/x86/x86/intr.c
diff -u src/sys/arch/x86/x86/intr.c:1.160 src/sys/arch/x86/x86/intr.c:1.161
--- src/sys/arch/x86/x86/intr.c:1.160	Sat Mar 12 15:50:45 2022
+++ src/sys/arch/x86/x86/intr.c	Wed Sep  7 00:40:19 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: intr.c,v 1.160 2022/03/12 15:50:45 riastradh Exp $	*/
+/*	$NetBSD: intr.c,v 1.161 2022/09/07 00:40:19 knakahara Exp $	*/
 
 /*
  * Copyright (c) 2007, 2008, 2009, 2019 The NetBSD Foundation, Inc.
@@ -133,7 +133,7 @@
  */
 
 #include <sys/cdefs.h>
-__KERNEL_RCSID(0, "$NetBSD: intr.c,v 1.160 2022/03/12 15:50:45 riastradh Exp $");
+__KERNEL_RCSID(0, "$NetBSD: intr.c,v 1.161 2022/09/07 00:40:19 knakahara Exp $");
 
 #include "opt_intrdebug.h"
 #include "opt_multiprocessor.h"
@@ -149,7 +149,6 @@ __KERNEL_RCSID(0, "$NetBSD: intr.c,v 1.1
 #include <sys/errno.h>
 #include <sys/intr.h>
 #include <sys/cpu.h>
-#include <sys/atomic.h>
 #include <sys/xcall.h>
 #include <sys/interrupt.h>
 #include <sys/reboot.h> /* for AB_VERBOSE */
@@ -1007,7 +1006,7 @@ intr_mask_xcall(void *arg1, void *arg2)
 			 * need to explicitly handle interrupts that
 			 * happened when when the source was masked.
 			 */
-			const uint32_t bit = (1U << ih->ih_slot);
+			const uint64_t bit = (1U << ih->ih_slot);
 			if (ci->ci_imasked & bit) {
 				ci->ci_imasked &= ~bit;
 				if (source->is_type != IST_LEVEL) {
@@ -1115,7 +1114,9 @@ intr_disestablish_xcall(void *arg1, void
 	idtvec = source->is_idtvec;
 
 	(*pic->pic_hwmask)(pic, ih->ih_pin);
-	atomic_and_32(&ci->ci_ipending, ~(1 << ih->ih_slot));
+	membar_sync();
+	ci->ci_ipending &= ~(1ULL << ih->ih_slot);
+	membar_sync();
 
 	/*
 	 * Remove the handler from the chain.
@@ -1421,8 +1422,8 @@ intr_printconfig(void)
 	for (CPU_INFO_FOREACH(cii, ci)) {
 		(*pr)("%s: interrupt masks:\n", device_xname(ci->ci_dev));
 		for (i = 0; i < NIPL; i++)
-			(*pr)("IPL %d mask %08lx unmask %08lx\n", i,
-			    (u_long)ci->ci_imask[i], (u_long)ci->ci_iunmask[i]);
+			(*pr)("IPL %d mask %016"PRIx64" unmask %016"PRIx64"\n",
+			    i, ci->ci_imask[i], ci->ci_iunmask[i]);
 		for (i = 0; i < MAX_INTR_SOURCES; i++) {
 			isp = ci->ci_isources[i];
 			if (isp == NULL)
@@ -1922,8 +1923,10 @@ intr_set_affinity(struct intrsource *isp
 
 	pin = isp->is_pin;
 	(*pic->pic_hwmask)(pic, pin); /* for ci_ipending check */
-	while (oldci->ci_ipending & (1 << oldslot)) {
+	membar_sync();
+	while (oldci->ci_ipending & (1ULL << oldslot)) {
 		(void)kpause("intrdist", false, 1, &cpu_lock);
+		membar_sync();
 	}
 
 	kpreempt_disable();
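
With the pending and mask words widened to 64 bits, per-slot bit constants have to be formed with a 64-bit shift (1ULL << slot, or an explicit cast): in C, shifting a 32-bit 1 or 1U by 32 or more is undefined behaviour, and slots 32-55 now exist. A short stand-alone illustration, not kernel code:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

int
main(void)
{
	unsigned int slot = 40;		/* slots 32-55 are new */
	uint64_t pending = 0;

	/*
	 * Promote to 64 bits before shifting: 1U << 40 would shift a
	 * 32-bit value by more than its width, which C leaves undefined.
	 */
	uint64_t bit = (uint64_t)1 << slot;	/* equivalent to 1ULL << slot */

	pending |= bit;
	printf("slot %u -> bit %#018" PRIx64 "\n", slot, pending);
	return 0;
}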

Index: src/sys/arch/x86/x86/lapic.c
diff -u src/sys/arch/x86/x86/lapic.c:1.88 src/sys/arch/x86/x86/lapic.c:1.89
--- src/sys/arch/x86/x86/lapic.c:1.88	Sat Aug 20 23:48:51 2022
+++ src/sys/arch/x86/x86/lapic.c	Wed Sep  7 00:40:19 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: lapic.c,v 1.88 2022/08/20 23:48:51 riastradh Exp $	*/
+/*	$NetBSD: lapic.c,v 1.89 2022/09/07 00:40:19 knakahara Exp $	*/
 
 /*-
  * Copyright (c) 2000, 2008, 2020 The NetBSD Foundation, Inc.
@@ -32,7 +32,7 @@
  */
 
 #include <sys/cdefs.h>
-__KERNEL_RCSID(0, "$NetBSD: lapic.c,v 1.88 2022/08/20 23:48:51 riastradh Exp $");
+__KERNEL_RCSID(0, "$NetBSD: lapic.c,v 1.89 2022/09/07 00:40:19 knakahara Exp $");
 
 #include "acpica.h"
 #include "ioapic.h"
@@ -548,7 +548,7 @@ lapic_get_timecount(struct timecounter *
 		if (lapic_readreg(reg) & (1 << (LAPIC_TIMER_VECTOR % 32))) {
 			cur_timer -= lapic_tval;
 		}
-	} else if (ci->ci_ipending & (1 << LIR_TIMER))
+	} else if (ci->ci_ipending & (1ULL << LIR_TIMER))
 		cur_timer = lapic_gettick() - lapic_tval;
 	cur_timer = ci->ci_lapic_counter - cur_timer;
 	splx(s);

Index: src/sys/arch/x86/x86/x86_softintr.c
diff -u src/sys/arch/x86/x86/x86_softintr.c:1.3 src/sys/arch/x86/x86/x86_softintr.c:1.4
--- src/sys/arch/x86/x86/x86_softintr.c:1.3	Fri May  8 21:43:54 2020
+++ src/sys/arch/x86/x86/x86_softintr.c	Wed Sep  7 00:40:19 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: x86_softintr.c,v 1.3 2020/05/08 21:43:54 ad Exp $	*/
+/*	$NetBSD: x86_softintr.c,v 1.4 2022/09/07 00:40:19 knakahara Exp $	*/
 
 /*
  * Copyright (c) 2007, 2008, 2009, 2019 The NetBSD Foundation, Inc.
@@ -133,7 +133,7 @@
  */
 
 #include <sys/cdefs.h>
-__KERNEL_RCSID(0, "$NetBSD: x86_softintr.c,v 1.3 2020/05/08 21:43:54 ad Exp $");
+__KERNEL_RCSID(0, "$NetBSD: x86_softintr.c,v 1.4 2022/09/07 00:40:19 knakahara Exp $");
 
 #include <sys/kmem.h>
 #include <sys/proc.h>
@@ -162,11 +162,12 @@ struct pic softintr_pic = {
 void
 x86_intr_calculatemasks(struct cpu_info *ci)
 {
-	int irq, level, unusedirqs, intrlevel[MAX_INTR_SOURCES];
+	uint64_t unusedirqs, intrlevel[MAX_INTR_SOURCES];
+	int irq, level;
 	struct intrhand *q;
 
 	/* First, figure out which levels each IRQ uses. */
-	unusedirqs = 0xffffffff;
+	unusedirqs = UINT64_MAX;
 	for (irq = 0; irq < MAX_INTR_SOURCES; irq++) {
 		int levels = 0;
 
@@ -175,18 +176,18 @@ x86_intr_calculatemasks(struct cpu_info 
 			continue;
 		}
 		for (q = ci->ci_isources[irq]->is_handlers; q; q = q->ih_next)
-			levels |= 1U << q->ih_level;
+			levels |= 1 << q->ih_level;
 		intrlevel[irq] = levels;
 		if (levels)
-			unusedirqs &= ~(1U << irq);
+			unusedirqs &= ~(1ULL << irq);
 	}
 
 	/* Then figure out which IRQs use each level. */
 	for (level = 0; level < NIPL; level++) {
-		int irqs = 0;
+		uint64_t irqs = 0;
 		for (irq = 0; irq < MAX_INTR_SOURCES; irq++)
-			if (intrlevel[irq] & (1U << level))
-				irqs |= 1U << irq;
+			if (intrlevel[irq] & (1ULL << level))
+				irqs |= 1ULL << irq;
 		ci->ci_imask[level] = irqs | unusedirqs;
 	}
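
The first two loops of x86_intr_calculatemasks() now work on 64-bit masks: the first records which IPLs each source's handlers use and which sources are unused, the second ORs a 64-bit source bit into each level's mask, with unused sources masked everywhere. A condensed user-space sketch of those two loops with made-up demo data in place of the ci_isources chain (the remainder of the function, which is not part of the hunk above, goes on to derive the final imask/iunmask values):

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define MAX_INTR_SOURCES	56
#define NIPL			8	/* illustrative level count */

int
main(void)
{
	/* intrlevel[irq]: bitmap of IPLs used by that source (demo data). */
	uint64_t intrlevel[MAX_INTR_SOURCES] = { 0 };
	uint64_t imask[NIPL];
	uint64_t unusedirqs = UINT64_MAX;
	int irq, level;

	intrlevel[3] = 1 << 4;			/* source 3 handled at IPL 4 */
	intrlevel[40] = (1 << 6) | (1 << 4);	/* source 40 at IPL 4 and 6 */

	/* First loop: note which sources are in use at all. */
	for (irq = 0; irq < MAX_INTR_SOURCES; irq++)
		if (intrlevel[irq] != 0)
			unusedirqs &= ~(1ULL << irq);

	/* Second loop: per level, mask the sources with a handler there. */
	for (level = 0; level < NIPL; level++) {
		uint64_t irqs = 0;
		for (irq = 0; irq < MAX_INTR_SOURCES; irq++)
			if (intrlevel[irq] & (1ULL << level))
				irqs |= 1ULL << irq;
		imask[level] = irqs | unusedirqs;
	}

	for (level = 0; level < NIPL; level++)
		printf("IPL %d mask %016" PRIx64 "\n", level, imask[level]);
	return 0;
}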
 

Index: src/sys/arch/xen/include/hypervisor.h
diff -u src/sys/arch/xen/include/hypervisor.h:1.54 src/sys/arch/xen/include/hypervisor.h:1.55
--- src/sys/arch/xen/include/hypervisor.h:1.54	Wed May 25 14:35:15 2022
+++ src/sys/arch/xen/include/hypervisor.h	Wed Sep  7 00:40:19 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: hypervisor.h,v 1.54 2022/05/25 14:35:15 bouyer Exp $	*/
+/*	$NetBSD: hypervisor.h,v 1.55 2022/09/07 00:40:19 knakahara Exp $	*/
 
 /*
  * Copyright (c) 2006 Manuel Bouyer.
@@ -183,7 +183,7 @@ void hypervisor_unmask_event(unsigned in
 void hypervisor_mask_event(unsigned int);
 void hypervisor_clear_event(unsigned int);
 void hypervisor_enable_sir(unsigned int);
-void hypervisor_set_ipending(uint32_t, int, int);
+void hypervisor_set_ipending(uint64_t, int, int);
 void hypervisor_machdep_attach(void);
 void hypervisor_machdep_resume(void);
 

Index: src/sys/arch/xen/include/intr.h
diff -u src/sys/arch/xen/include/intr.h:1.59 src/sys/arch/xen/include/intr.h:1.60
--- src/sys/arch/xen/include/intr.h:1.59	Mon May 23 15:03:05 2022
+++ src/sys/arch/xen/include/intr.h	Wed Sep  7 00:40:19 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: intr.h,v 1.59 2022/05/23 15:03:05 bouyer Exp $	*/
+/*	$NetBSD: intr.h,v 1.60 2022/09/07 00:40:19 knakahara Exp $	*/
 /*	NetBSD intr.h,v 1.15 2004/10/31 10:39:34 yamt Exp	*/
 
 /*-
@@ -54,7 +54,7 @@ struct cpu_info;
 
 struct evtsource {
 	int ev_maxlevel;		/* max. IPL for this source */
-	uint32_t ev_imask;		/* interrupt mask */
+	uint64_t ev_imask;		/* interrupt mask */
 	struct intrhand *ev_handlers;	/* handler chain */
 	struct evcnt ev_evcnt;		/* interrupt counter */
 	struct cpu_info *ev_cpu;        /* cpu on which this event is bound */

Index: src/sys/arch/xen/x86/hypervisor_machdep.c
diff -u src/sys/arch/xen/x86/hypervisor_machdep.c:1.44 src/sys/arch/xen/x86/hypervisor_machdep.c:1.45
--- src/sys/arch/xen/x86/hypervisor_machdep.c:1.44	Sat Aug 20 23:48:51 2022
+++ src/sys/arch/xen/x86/hypervisor_machdep.c	Wed Sep  7 00:40:19 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: hypervisor_machdep.c,v 1.44 2022/08/20 23:48:51 riastradh Exp $	*/
+/*	$NetBSD: hypervisor_machdep.c,v 1.45 2022/09/07 00:40:19 knakahara Exp $	*/
 
 /*
  *
@@ -54,7 +54,7 @@
 
 
 #include <sys/cdefs.h>
-__KERNEL_RCSID(0, "$NetBSD: hypervisor_machdep.c,v 1.44 2022/08/20 23:48:51 riastradh Exp $");
+__KERNEL_RCSID(0, "$NetBSD: hypervisor_machdep.c,v 1.45 2022/09/07 00:40:19 knakahara Exp $");
 
 #include <sys/param.h>
 #include <sys/systm.h>
@@ -274,7 +274,7 @@ do_hypervisor_callback(struct intrframe 
 	volatile shared_info_t *s = HYPERVISOR_shared_info;
 	struct cpu_info *ci;
 	volatile struct vcpu_info *vci;
-	int level __diagused;
+	uint64_t level __diagused;
 
 	ci = curcpu();
 	vci = ci->ci_vcpu;
@@ -304,9 +304,9 @@ do_hypervisor_callback(struct intrframe 
 
 #ifdef DIAGNOSTIC
 	if (level != ci->ci_ilevel)
-		printf("hypervisor done %08x level %d/%d ipending %08x\n",
+		printf("hypervisor done %08x level %" PRIu64 "/%" PRIu64 " ipending %0" PRIx64 "\n",
 		    (uint)vci->evtchn_pending_sel,
-		    level, ci->ci_ilevel, ci->ci_ipending);
+		    level, (uint64_t)ci->ci_ilevel, (uint64_t)ci->ci_ipending);
 #endif
 }
 
@@ -419,7 +419,7 @@ hypervisor_enable_sir(unsigned int sir)
 }
 
 void
-hypervisor_set_ipending(uint32_t imask, int l1, int l2)
+hypervisor_set_ipending(uint64_t imask, int l1, int l2)
 {
 
 	/* This function is not re-entrant */

Index: src/sys/arch/xen/xen/evtchn.c
diff -u src/sys/arch/xen/xen/evtchn.c:1.99 src/sys/arch/xen/xen/evtchn.c:1.100
--- src/sys/arch/xen/xen/evtchn.c:1.99	Wed May 25 14:35:15 2022
+++ src/sys/arch/xen/xen/evtchn.c	Wed Sep  7 00:40:19 2022
@@ -1,4 +1,4 @@
-/*	$NetBSD: evtchn.c,v 1.99 2022/05/25 14:35:15 bouyer Exp $	*/
+/*	$NetBSD: evtchn.c,v 1.100 2022/09/07 00:40:19 knakahara Exp $	*/
 
 /*
  * Copyright (c) 2006 Manuel Bouyer.
@@ -54,7 +54,7 @@
 
 
 #include <sys/cdefs.h>
-__KERNEL_RCSID(0, "$NetBSD: evtchn.c,v 1.99 2022/05/25 14:35:15 bouyer Exp $");
+__KERNEL_RCSID(0, "$NetBSD: evtchn.c,v 1.100 2022/09/07 00:40:19 knakahara Exp $");
 
 #include "opt_xen.h"
 #include "isa.h"
@@ -320,7 +320,7 @@ evtchn_do_event(int evtch, struct intrfr
 	int ilevel;
 	struct intrhand *ih;
 	int	(*ih_fun)(void *, void *);
-	uint32_t iplmask;
+	uint64_t iplmask;
 
 	KASSERTMSG(evtch >= 0, "negative evtch: %d", evtch);
 	KASSERTMSG(evtch < NR_EVENT_CHANNELS,
@@ -381,7 +381,7 @@ evtchn_do_event(int evtch, struct intrfr
 #if 0
 		if (ih->ih_cpu != ci) {
 			hypervisor_send_event(ih->ih_cpu, evtch);
-			iplmask &= ~(1 << XEN_IPL2SIR(ih->ih_level));
+			iplmask &= ~(1ULL << XEN_IPL2SIR(ih->ih_level));
 			ih = ih->ih_evt_next;
 			continue;
 		}
@@ -401,7 +401,7 @@ evtchn_do_event(int evtch, struct intrfr
 			}
 			goto splx;
 		}
-		iplmask &= ~(1 << XEN_IPL2SIR(ih->ih_level));
+		iplmask &= ~(1ULL << XEN_IPL2SIR(ih->ih_level));
 		ci->ci_ilevel = ih->ih_level;
 		ih->ih_pending = 0;
 		ih_fun = (void *)ih->ih_fun;

Index: src/sys/arch/xen/xen/xenevt.c
diff -u src/sys/arch/xen/xen/xenevt.c:1.66 src/sys/arch/xen/xen/xenevt.c:1.67
--- src/sys/arch/xen/xen/xenevt.c:1.66	Sun Sep  4 11:20:33 2022
+++ src/sys/arch/xen/xen/xenevt.c	Wed Sep  7 00:40:19 2022
@@ -1,4 +1,4 @@
-/*      $NetBSD: xenevt.c,v 1.66 2022/09/04 11:20:33 bouyer Exp $      */
+/*      $NetBSD: xenevt.c,v 1.67 2022/09/07 00:40:19 knakahara Exp $      */
 
 /*
  * Copyright (c) 2005 Manuel Bouyer.
@@ -26,7 +26,7 @@
  */
 
 #include <sys/cdefs.h>
-__KERNEL_RCSID(0, "$NetBSD: xenevt.c,v 1.66 2022/09/04 11:20:33 bouyer Exp $");
+__KERNEL_RCSID(0, "$NetBSD: xenevt.c,v 1.67 2022/09/07 00:40:19 knakahara Exp $");
 
 #include "opt_xen.h"
 #include <sys/param.h>
@@ -210,7 +210,7 @@ xenevt_setipending(int l1, int l2)
 	KASSERT(xenevt_ih->ih_cpu->ci_ilevel >= IPL_HIGH);
 	atomic_or_ulong(&xenevt_ev1, 1UL << l1);
 	atomic_or_ulong(&xenevt_ev2[l1], 1UL << l2);
-	atomic_or_32(&xenevt_ih->ih_cpu->ci_ipending, 1 << SIR_XENIPL_HIGH);
+	atomic_or_64(&xenevt_ih->ih_cpu->ci_istate, (1ULL << SIR_XENIPL_HIGH) << 8);
 	atomic_add_int(&xenevt_ih->ih_pending, 1);
 	evtsource[xenevt_ev]->ev_evcnt.ev_count++;
 }
