# Check 64-bit APX NDD instructions with EVEX prefix encoding

	.allow_index_reg
	.text
_start:
	# Testsuite fixture: each line below is an APX NDD (new data destination)
	# form, where the last operand is the non-destructive destination and the
	# extended GPRs r16-r31 force EVEX encoding.  The exact sequence/order
	# matters: it is matched against an expected-disassembly file, so lines
	# must not be reordered, deduplicated, or reformatted.

	# --- ADC: imm/reg/mem sources, 8/16/32-bit widths, EGPR destinations ---
	adc    $0x1234,%ax,%r30w
	adc    %r15b,%r17b,%r18b
	adc    %r15d,(%r8),%r18d
	adc    (%r15,%rax,1),%r16b,%r8b
	adc    (%r15,%rax,1),%r16w,%r8w
	adcl   $0x11,(%r19,%rax,4),%r20d
	# --- ADCX: note the middle line is the legacy 2-operand form (not NDD);
	# presumably included to test EVEX promotion via the EGPR index r31 ---
	adcx   %r15d,%r8d,%r18d
	adcx   (%r15,%r31,1),%r8
	adcx   (%r15,%r31,1),%r8d,%r18d
	# --- ADD: immediates of varying width (incl. sign-extended imm32),
	# reg/reg, reg/mem, mem/reg, SIB with EGPR base/index, size suffixes ---
	add    $0x1234,%ax,%r30w
	add    $0x12344433,%r15,%r16
	add    $0x34,%r13b,%r17b
	add    $0xfffffffff4332211,%rax,%r8
	add    %r31,%r8,%r16
	add    %r31,(%r8),%r16
	add    %r31,(%r8,%r16,8),%r16
	add    %r31b,%r8b,%r16b
	add    %r31d,%r8d,%r16d
	add    %r31w,%r8w,%r16w
	add    (%r31),%r8,%r16
	add    0x9090(%r31,%r16,1),%r8,%r16
	addb   %r31b,%r8b,%r16b
	addl   %r31d,%r8d,%r16d
	addl   $0x11,(%r19,%rax,4),%r20d
	addq   %r31,%r8,%r16
	addq   $0x12344433,(%r15,%rcx,4),%r16
	addw   %r31w,%r8w,%r16w
	# --- ADOX, plus {load}/{store} pseudo-prefix variants of add ---
	adox   %r15d,%r8d,%r18d
	{load}  add    %r31,%r8,%r16
	{store} add    %r31,%r8,%r16
	# NOTE(review): like adcx above, the first adox line here is the legacy
	# 2-operand form mixed into an NDD test -- confirm against expected output.
	adox   (%r15,%r31,1),%r8
	adox   (%r15,%r31,1),%r8d,%r18d
	# --- AND ---
	and    $0x1234,%ax,%r30w
	and    %r15b,%r17b,%r18b
	and    %r15d,(%r8),%r18d
	and    (%r15,%rax,1),%r16b,%r8b
	and    (%r15,%rax,1),%r16w,%r8w
	andl   $0x11,(%r19,%rax,4),%r20d
	# --- CMOVcc: full condition-code sweep; 32-bit address-size override
	# (%eax base) with an EGPR destination ---
	cmova  0x90909090(%eax),%edx,%r8d
	cmovae 0x90909090(%eax),%edx,%r8d
	cmovb  0x90909090(%eax),%edx,%r8d
	cmovbe 0x90909090(%eax),%edx,%r8d
	cmove  0x90909090(%eax),%edx,%r8d
	cmovg  0x90909090(%eax),%edx,%r8d
	cmovge 0x90909090(%eax),%edx,%r8d
	cmovl  0x90909090(%eax),%edx,%r8d
	cmovle 0x90909090(%eax),%edx,%r8d
	cmovne 0x90909090(%eax),%edx,%r8d
	cmovno 0x90909090(%eax),%edx,%r8d
	cmovnp 0x90909090(%eax),%edx,%r8d
	cmovns 0x90909090(%eax),%edx,%r8d
	cmovo  0x90909090(%eax),%edx,%r8d
	cmovp  0x90909090(%eax),%edx,%r8d
	cmovs  0x90909090(%eax),%edx,%r8d
	# --- Single-operand ops promoted to 2-operand NDD: dec/inc/neg/not ---
	dec    %rax,%r17
	decb   (%r31,%r12,1),%r8b
	imul   0x909(%rax,%r31,8),%rdx,%r25
	imul   0x90909(%eax),%edx,%r8d
	inc    %r31,%r16
	inc    %r31,%r8
	inc    %rax,%rbx
	neg    %rax,%r17
	negb   (%r31,%r12,1),%r8b
	not    %rax,%r17
	notb   (%r31,%r12,1),%r8b
	# --- OR ---
	or     $0x1234,%ax,%r30w
	or     %r15b,%r17b,%r18b
	or     %r15d,(%r8),%r18d
	or     (%r15,%rax,1),%r16b,%r8b
	or     (%r15,%rax,1),%r16w,%r8w
	orl    $0x11,(%r19,%rax,4),%r20d
	# --- Rotate/shift family: imm and %cl counts, reg and mem sources,
	# explicit size suffixes; destination is always an EGPR ---
	rcl    $0x2,%r12b,%r31b
	rcl    %cl,%r16b,%r8b
	rclb   $0x1,(%rax),%r31b
	rcll   $0x2,(%rax),%r31d
	rclw   $0x1,(%rax),%r31w
	rclw   %cl,(%r19,%rax,4),%r31w
	rcr    $0x2,%r12b,%r31b
	rcr    %cl,%r16b,%r8b
	rcrb   $0x1,(%rax),%r31b
	rcrl   $0x2,(%rax),%r31d
	rcrw   $0x1,(%rax),%r31w
	rcrw   %cl,(%r19,%rax,4),%r31w
	rol    $0x2,%r12b,%r31b
	rol    %cl,%r16b,%r8b
	rolb   $0x1,(%rax),%r31b
	roll   $0x2,(%rax),%r31d
	rolw   $0x1,(%rax),%r31w
	rolw   %cl,(%r19,%rax,4),%r31w
	ror    $0x2,%r12b,%r31b
	ror    %cl,%r16b,%r8b
	rorb   $0x1,(%rax),%r31b
	rorl   $0x2,(%rax),%r31d
	rorw   $0x1,(%rax),%r31w
	rorw   %cl,(%r19,%rax,4),%r31w
	sar    $0x2,%r12b,%r31b
	sar    %cl,%r16b,%r8b
	sarb   $0x1,(%rax),%r31b
	sarl   $0x2,(%rax),%r31d
	sarw   $0x1,(%rax),%r31w
	sarw   %cl,(%r19,%rax,4),%r31w
	# --- SBB ---
	sbb    $0x1234,%ax,%r30w
	sbb    %r15b,%r17b,%r18b
	sbb    %r15d,(%r8),%r18d
	sbb    (%r15,%rax,1),%r16b,%r8b
	sbb    (%r15,%rax,1),%r16w,%r8w
	sbbl   $0x11,(%r19,%rax,4),%r20d
	# NOTE(review): the shl/shlb/shll/shlw lines below each appear twice --
	# presumably sal/shl alias pairs normalized to shl by the generator;
	# confirm the duplication is intentional against the expected .d file.
	shl    $0x2,%r12b,%r31b
	shl    $0x2,%r12b,%r31b
	shl    %cl,%r16b,%r8b
	shl    %cl,%r16b,%r8b
	shlb   $0x1,(%rax),%r31b
	shlb   $0x1,(%rax),%r31b
	# --- SHLD/SHRD: double-precision shifts, 4-operand NDD forms ---
	shld   $0x1,%r12,(%rax),%r31
	shld   $0x2,%r15d,(%rax),%r31d
	shld   $0x2,%r8w,%r12w,%r31w
	shld   %cl,%r12,%r16,%r8
	shld   %cl,%r13w,(%r19,%rax,4),%r31w
	shld   %cl,%r9w,(%rax),%r31w
	shll   $0x2,(%rax),%r31d
	shll   $0x2,(%rax),%r31d
	shlw   $0x1,(%rax),%r31w
	shlw   $0x1,(%rax),%r31w
	shlw   %cl,(%r19,%rax,4),%r31w
	shlw   %cl,(%r19,%rax,4),%r31w
	shr    $0x2,%r12b,%r31b
	shr    %cl,%r16b,%r8b
	shrb   $0x1,(%rax),%r31b
	shrd   $0x1,%r12,(%rax),%r31
	shrd   $0x2,%r15d,(%rax),%r31d
	shrd   $0x2,%r8w,%r12w,%r31w
	shrd   %cl,%r12,%r16,%r8
	shrd   %cl,%r13w,(%r19,%rax,4),%r31w
	shrd   %cl,%r9w,(%rax),%r31w
	shrl   $0x2,(%rax),%r31d
	shrw   $0x1,(%rax),%r31w
	shrw   %cl,(%r19,%rax,4),%r31w
	# --- SUB ---
	sub    $0x1234,%ax,%r30w
	sub    %r15b,%r17b,%r18b
	sub    %r15d,(%r8),%r18d
	sub    (%r15,%rax,1),%r16b,%r8b
	sub    (%r15,%rax,1),%r16w,%r8w
	subl   $0x11,(%r19,%rax,4),%r20d
	# --- XOR ---
	xor    $0x1234,%ax,%r30w
	xor    %r15b,%r17b,%r18b
	xor    %r15d,(%r8),%r18d
	xor    (%r15,%rax,1),%r16b,%r8b
	xor    (%r15,%rax,1),%r16w,%r8w
	xorl   $0x11,(%r19,%rax,4),%r20d

