OpenREIL tests
0000000000003320 <_aes128_encrypt>:
3320: 55 pushq %rbp
3321: 48 89 e5 movq %rsp, %rbp
3324: 41 57 pushq %r15
3326: 41 56 pushq %r14
3328: 41 55 pushq %r13
332a: 41 54 pushq %r12
332c: 53 pushq %rbx
332d: 48 81 ec f8 00 00 00 subq $248, %rsp
3334: 48 89 f0 movq %rsi, %rax
3337: 48 89 fb movq %rdi, %rbx
333a: 48 8d b5 e0 fe ff ff leaq -288(%rbp), %rsi
3341: 48 89 c7 movq %rax, %rdi
3344: e8 e7 04 00 00 callq 0x3830 <_key_expansion>
3349: 48 89 5d a8 movq %rbx, -88(%rbp)
334d: c5 fa 6f 03 vmovdqu (%rbx), %xmm0
3351: c5 f9 ef 85 e0 fe ff ff vpxor -288(%rbp), %xmm0, %xmm0
3359: b8 10 00 00 00 movl $16, %eax
335e: c5 f9 6f 0d ba 09 00 00 vmovdqa 2490(%rip), %xmm1 ## 0x3d20
3366: c5 f9 6f 15 c2 09 00 00 vmovdqa 2498(%rip), %xmm2 ## 0x3d30
336e: 66 90 nop
3370: 48 89 45 90 movq %rax, -112(%rbp)
3374: c4 e3 79 14 c0 00 vpextrb $0, %xmm0, %eax
337a: 4c 8d 35 0f 0a 00 00 leaq 2575(%rip), %r14 ## 0x3d90 <_sbox>
3381: 46 0f b6 2c 30 movzbl (%rax,%r14), %r13d
3386: c4 e3 79 14 c0 02 vpextrb $2, %xmm0, %eax
338c: c4 e3 79 14 c1 03 vpextrb $3, %xmm0, %ecx
3392: 42 0f b6 04 30 movzbl (%rax,%r14), %eax
3397: 48 89 45 a0 movq %rax, -96(%rbp)
339b: 42 0f b6 04 31 movzbl (%rcx,%r14), %eax
33a0: 48 89 45 b8 movq %rax, -72(%rbp)
33a4: c4 e3 79 14 c0 04 vpextrb $4, %xmm0, %eax
33aa: c4 e3 79 14 c1 05 vpextrb $5, %xmm0, %ecx
33b0: 46 0f b6 14 30 movzbl (%rax,%r14), %r10d
33b5: 46 0f b6 24 31 movzbl (%rcx,%r14), %r12d
33ba: c4 e3 79 14 c0 07 vpextrb $7, %xmm0, %eax
33c0: c4 e3 79 14 c1 08 vpextrb $8, %xmm0, %ecx
33c6: 46 0f b6 0c 30 movzbl (%rax,%r14), %r9d
33cb: 46 0f b6 04 31 movzbl (%rcx,%r14), %r8d
33d0: c4 e3 79 14 c0 0a vpextrb $10, %xmm0, %eax
33d6: c4 e3 79 14 c2 0f vpextrb $15, %xmm0, %edx
33dc: 42 0f b6 0c 30 movzbl (%rax,%r14), %ecx
33e1: 42 0f b6 3c 32 movzbl (%rdx,%r14), %edi
33e6: 43 8d 44 2d 00 leal (%r13,%r13), %eax
33eb: 44 89 ea movl %r13d, %edx
33ee: c0 fa 07 sarb $7, %dl
33f1: 80 e2 1b andb $27, %dl
33f4: 30 c2 xorb %al, %dl
33f6: 43 8d 04 24 leal (%r12,%r12), %eax
33fa: 44 89 e3 movl %r12d, %ebx
33fd: c0 fb 07 sarb $7, %bl
3400: 80 e3 1b andb $27, %bl
3403: 30 c8 xorb %cl, %al
3405: 30 d8 xorb %bl, %al
3407: 89 c3 movl %eax, %ebx
3409: 30 d3 xorb %dl, %bl
340b: 88 5d c0 movb %bl, -64(%rbp)
340e: 8d 1c 09 leal (%rcx,%rcx), %ebx
3411: 30 ca xorb %cl, %dl
3413: 88 55 d3 movb %dl, -45(%rbp)
3416: c0 f9 07 sarb $7, %cl
3419: 80 e1 1b andb $27, %cl
341c: 30 d9 xorb %bl, %cl
341e: 89 ca movl %ecx, %edx
3420: 88 4d d5 movb %cl, -43(%rbp)
3423: 8d 1c 3f leal (%rdi,%rdi), %ebx
3426: 48 89 7d 98 movq %rdi, -104(%rbp)
342a: c5 f9 6e df vmovd %edi, %xmm3
342e: 89 f9 movl %edi, %ecx
3430: c0 f9 07 sarb $7, %cl
3433: 80 e1 1b andb $27, %cl
3436: 30 d9 xorb %bl, %cl
3438: 43 8d 1c 12 leal (%r10,%r10), %ebx
343c: 45 89 d7 movl %r10d, %r15d
343f: 41 c0 ff 07 sarb $7, %r15b
3443: 41 80 e7 1b andb $27, %r15b
3447: 41 30 df xorb %bl, %r15b
344a: c4 e3 79 14 c3 09 vpextrb $9, %xmm0, %ebx
3450: 44 30 e8 xorb %r13b, %al
3453: 89 45 b0 movl %eax, -80(%rbp)
3456: 45 30 e5 xorb %r12b, %r13b
3459: c4 c1 79 6e e4 vmovd %r12d, %xmm4
345e: 0f b6 fa movzbl %dl, %edi
3461: c4 e3 59 20 e7 01 vpinsrb $1, %edi, %xmm4, %xmm4
3467: c4 e3 79 14 c7 0d vpextrb $13, %xmm0, %edi
346d: 42 0f b6 1c 33 movzbl (%rbx,%r14), %ebx
3472: 41 0f b6 c5 movzbl %r13b, %eax
3476: c4 e3 59 20 e0 02 vpinsrb $2, %eax, %xmm4, %xmm4
347c: c4 e3 79 14 c0 0e vpextrb $14, %xmm0, %eax
3482: 42 0f b6 3c 37 movzbl (%rdi,%r14), %edi
3487: 42 0f b6 04 30 movzbl (%rax,%r14), %eax
348c: 0f b6 c9 movzbl %cl, %ecx
348f: c4 e3 61 20 d9 01 vpinsrb $1, %ecx, %xmm3, %xmm3
3495: 8d 0c 00 leal (%rax,%rax), %ecx
3498: c4 e3 59 20 e3 03 vpinsrb $3, %ebx, %xmm4, %xmm4
349e: c4 e3 59 20 e0 04 vpinsrb $4, %eax, %xmm4, %xmm4
34a4: c4 e3 61 20 d8 02 vpinsrb $2, %eax, %xmm3, %xmm3
34aa: c0 f8 07 sarb $7, %al
34ad: 24 1b andb $27, %al
34af: 30 c8 xorb %cl, %al
34b1: 88 45 d4 movb %al, -44(%rbp)
34b4: 44 8d 1c 1b leal (%rbx,%rbx), %r11d
34b8: 89 d8 movl %ebx, %eax
34ba: c0 f8 07 sarb $7, %al
34bd: 24 1b andb $27, %al
34bf: 48 8b 4d b8 movq -72(%rbp), %rcx
34c3: 41 30 cb xorb %cl, %r11b
34c6: 41 30 c3 xorb %al, %r11b
34c9: 44 89 d8 movl %r11d, %eax
34cc: 44 88 5d cc movb %r11b, -52(%rbp)
34d0: 45 30 d3 xorb %r10b, %r11b
34d3: 41 30 da xorb %bl, %r10b
34d6: c4 e3 59 20 e1 05 vpinsrb $5, %ecx, %xmm4, %xmm4
34dc: 8d 04 09 leal (%rcx,%rcx), %eax
34df: 41 89 cd movl %ecx, %r13d
34e2: 41 c0 fd 07 sarb $7, %r13b
34e6: 41 80 e5 1b andb $27, %r13b
34ea: 41 30 c5 xorb %al, %r13b
34ed: 45 30 d5 xorb %r10b, %r13b
34f0: 47 8d 14 00 leal (%r8,%r8), %r10d
34f4: 89 f8 movl %edi, %eax
34f6: 44 30 c0 xorb %r8b, %al
34f9: 88 45 b8 movb %al, -72(%rbp)
34fc: 41 0f b6 f7 movzbl %r15b, %esi
3500: c4 e3 59 20 e6 06 vpinsrb $6, %esi, %xmm4, %xmm4
3506: c4 e3 59 20 e7 07 vpinsrb $7, %edi, %xmm4, %xmm4
350c: c4 c3 59 20 e0 08 vpinsrb $8, %r8d, %xmm4, %xmm4
3512: 44 89 c2 movl %r8d, %edx
3515: c0 fa 07 sarb $7, %dl
3518: 80 e2 1b andb $27, %dl
351b: 44 30 d2 xorb %r10b, %dl
351e: 8d 34 3f leal (%rdi,%rdi), %esi
3521: 41 89 fa movl %edi, %r10d
3524: 41 c0 fa 07 sarb $7, %r10b
3528: 41 80 e2 1b andb $27, %r10b
352c: 41 30 f2 xorb %sil, %r10b
352f: 45 89 cc movl %r9d, %r12d
3532: 4c 8b 45 a0 movq -96(%rbp), %r8
3536: 43 8d 3c 00 leal (%r8,%r8), %edi
353a: 44 30 cf xorb %r9b, %dil
353d: 43 8d 34 09 leal (%r9,%r9), %esi
3541: 41 c0 f9 07 sarb $7, %r9b
3545: 41 80 e1 1b andb $27, %r9b
3549: 41 30 f1 xorb %sil, %r9b
354c: 4c 89 c8 movq %r9, %rax
354f: c4 e3 79 14 c6 0b vpextrb $11, %xmm0, %esi
3555: 46 0f b6 0c 36 movzbl (%rsi,%r14), %r9d
355a: 44 89 c9 movl %r9d, %ecx
355d: c0 f9 07 sarb $7, %cl
3560: 80 e1 1b andb $27, %cl
3563: 43 8d 34 09 leal (%r9,%r9), %esi
3567: 40 30 f1 xorb %sil, %cl
356a: 44 89 c3 movl %r8d, %ebx
356d: c0 fb 07 sarb $7, %bl
3570: 80 e3 1b andb $27, %bl
3573: 40 30 df xorb %bl, %dil
3576: 89 7d b4 movl %edi, -76(%rbp)
3579: 45 30 c4 xorb %r8b, %r12b
357c: 41 30 d4 xorb %dl, %r12b
357f: 44 88 65 d7 movb %r12b, -41(%rbp)
3583: 0f b6 f0 movzbl %al, %esi
3586: c4 e3 59 20 e6 09 vpinsrb $9, %esi, %xmm4, %xmm4
358c: c4 c3 59 20 e1 0a vpinsrb $10, %r9d, %xmm4, %xmm4
3592: 0f b6 f1 movzbl %cl, %esi
3595: c4 e3 59 20 e6 0b vpinsrb $11, %esi, %xmm4, %xmm4
359b: 89 f8 movl %edi, %eax
359d: 44 30 c0 xorb %r8b, %al
35a0: 88 45 d6 movb %al, -42(%rbp)
35a3: 44 30 c2 xorb %r8b, %dl
35a6: c4 e3 79 14 c6 01 vpextrb $1, %xmm0, %esi
35ac: 4c 89 f0 movq %r14, %rax
35af: 46 0f b6 04 36 movzbl (%rsi,%r14), %r8d
35b4: c4 c3 79 14 c6 06 vpextrb $6, %xmm0, %r14d
35ba: 45 0f b6 34 06 movzbl (%r14,%rax), %r14d
35bf: 43 8d 34 00 leal (%r8,%r8), %esi
35c3: 44 30 f6 xorb %r14b, %sil
35c6: c4 c3 59 20 e6 0c vpinsrb $12, %r14d, %xmm4, %xmm4
35cc: 44 89 f0 movl %r14d, %eax
35cf: 45 01 f6 addl %r14d, %r14d
35d2: c0 f8 07 sarb $7, %al
35d5: 24 1b andb $27, %al
35d7: 44 30 f0 xorb %r14b, %al
35da: 44 89 c3 movl %r8d, %ebx
35dd: c0 fb 07 sarb $7, %bl
35e0: 80 e3 1b andb $27, %bl
35e3: 40 30 de xorb %bl, %sil
35e6: 41 89 f6 movl %esi, %r14d
35e9: 40 30 c6 xorb %al, %sil
35ec: 44 30 c8 xorb %r9b, %al
35ef: 0f b6 5d c0 movzbl -64(%rbp), %ebx
35f3: c5 f9 6e eb vmovd %ebx, %xmm5
35f7: 48 8b 5d 98 movq -104(%rbp), %rbx
35fb: c4 e3 79 14 c7 0c vpextrb $12, %xmm0, %edi
3601: 48 89 7d c0 movq %rdi, -64(%rbp)
3605: 32 5d d5 xorb -43(%rbp), %bl
3608: 4c 8d 0d 81 07 00 00 leaq 1921(%rip), %r9 ## 0x3d90 <_sbox>
360f: 4c 8b 65 c0 movq -64(%rbp), %r12
3613: 47 0f b6 0c 0c movzbl (%r12,%r9), %r9d
3618: 44 30 7d cc xorb %r15b, -52(%rbp)
361c: 45 30 c6 xorb %r8b, %r14b
361f: 45 30 c8 xorb %r9b, %r8b
3622: 45 89 c7 movl %r8d, %r15d
3625: 41 30 cf xorb %cl, %r15b
3628: 0f b6 cb movzbl %bl, %ecx
362b: 89 4d c0 movl %ecx, -64(%rbp)
362e: 0f b6 5d b0 movzbl -80(%rbp), %ebx
3632: c4 e3 51 20 c3 01 vpinsrb $1, %ebx, %xmm5, %xmm0
3638: 0f b6 5d d3 movzbl -45(%rbp), %ebx
363c: 0f b6 4d cc movzbl -52(%rbp), %ecx
3640: 89 4d cc movl %ecx, -52(%rbp)
3643: c4 e3 79 20 45 c0 02 vpinsrb $2, -64(%rbp), %xmm0, %xmm0
364a: 41 0f b6 cd movzbl %r13b, %ecx
364e: 0f b6 7d d7 movzbl -41(%rbp), %edi
3652: c4 e3 79 20 c3 03 vpinsrb $3, %ebx, %xmm0, %xmm0
3658: 0f b6 5d d6 movzbl -42(%rbp), %ebx
365c: 0f b6 d2 movzbl %dl, %edx
365f: c4 e3 79 20 45 cc 04 vpinsrb $4, -52(%rbp), %xmm0, %xmm0
3666: 45 0f b6 f6 movzbl %r14b, %r14d
366a: 44 0f b6 e8 movzbl %al, %r13d
366e: 41 0f b6 c3 movzbl %r11b, %eax
3672: c4 e3 79 20 c0 05 vpinsrb $5, %eax, %xmm0, %xmm0
3678: 45 0f b6 df movzbl %r15b, %r11d
367c: c4 e3 79 20 c1 06 vpinsrb $6, %ecx, %xmm0, %xmm0
3682: 44 0f b6 7d d4 movzbl -44(%rbp), %r15d
3687: c4 e3 79 20 c1 07 vpinsrb $7, %ecx, %xmm0, %xmm0
368d: 45 0f b6 d2 movzbl %r10b, %r10d
3691: c4 e3 79 20 c7 08 vpinsrb $8, %edi, %xmm0, %xmm0
3697: 43 8d 04 09 leal (%r9,%r9), %eax
369b: c4 e3 79 20 c3 09 vpinsrb $9, %ebx, %xmm0, %xmm0
36a1: 0f b6 5d b8 movzbl -72(%rbp), %ebx
36a5: 44 89 c9 movl %r9d, %ecx
36a8: c0 f9 07 sarb $7, %cl
36ab: 80 e1 1b andb $27, %cl
36ae: 30 c1 xorb %al, %cl
36b0: 0f b6 45 b4 movzbl -76(%rbp), %eax
36b4: c4 e3 79 20 c0 0a vpinsrb $10, %eax, %xmm0, %xmm0
36ba: c4 e3 79 20 c2 0b vpinsrb $11, %edx, %xmm0, %xmm0
36c0: 0f b6 c1 movzbl %cl, %eax
36c3: c4 c3 79 20 c6 0c vpinsrb $12, %r14d, %xmm0, %xmm0
36c9: 40 0f b6 ce movzbl %sil, %ecx
36cd: c4 e3 79 20 c1 0d vpinsrb $13, %ecx, %xmm0, %xmm0
36d3: c4 c3 79 20 c5 0e vpinsrb $14, %r13d, %xmm0, %xmm0
36d9: c4 c3 79 20 c3 0f vpinsrb $15, %r11d, %xmm0, %xmm0
36df: c4 c3 61 20 df 03 vpinsrb $3, %r15d, %xmm3, %xmm3
36e5: c4 c3 61 20 da 04 vpinsrb $4, %r10d, %xmm3, %xmm3
36eb: c4 e3 61 20 db 05 vpinsrb $5, %ebx, %xmm3, %xmm3
36f1: c4 e3 61 20 d8 06 vpinsrb $6, %eax, %xmm3, %xmm3
36f7: c4 c3 61 20 d9 07 vpinsrb $7, %r9d, %xmm3, %xmm3
36fd: 41 0f b6 c0 movzbl %r8b, %eax
3701: c4 e3 61 20 d8 08 vpinsrb $8, %eax, %xmm3, %xmm3
3707: 48 8b 45 90 movq -112(%rbp), %rax
370b: c4 e2 59 00 e1 vpshufb %xmm1, %xmm4, %xmm4
3710: c5 f9 ef e4 vpxor %xmm4, %xmm0, %xmm4
3714: c4 e2 61 00 c2 vpshufb %xmm2, %xmm3, %xmm0
3719: 62 f3 dd 08 25 44 05 ee 96 vpternlogq $150, -288(%rbp,%rax), %xmm4, %xmm0
3722: 48 83 c0 10 addq $16, %rax
3726: 48 3d a0 00 00 00 cmpq $160, %rax
372c: 0f 85 3e fc ff ff jne 0x3370 <_aes128_encrypt+0x50>
3732: c4 e3 79 14 c0 00 vpextrb $0, %xmm0, %eax
3738: 48 8d 0d 51 06 00 00 leaq 1617(%rip), %rcx ## 0x3d90 <_sbox>
373f: 0f b6 04 08 movzbl (%rax,%rcx), %eax
3743: c5 f9 6e c8 vmovd %eax, %xmm1
3747: c4 e3 79 14 c0 05 vpextrb $5, %xmm0, %eax
374d: c4 e3 71 20 0c 08 01 vpinsrb $1, (%rax,%rcx), %xmm1, %xmm1
3754: c4 e3 79 14 c0 0a vpextrb $10, %xmm0, %eax
375a: c4 e3 71 20 0c 08 02 vpinsrb $2, (%rax,%rcx), %xmm1, %xmm1
3761: c4 e3 79 14 c0 0f vpextrb $15, %xmm0, %eax
3767: c4 e3 71 20 0c 08 03 vpinsrb $3, (%rax,%rcx), %xmm1, %xmm1
376e: c4 e3 79 14 c0 04 vpextrb $4, %xmm0, %eax
3774: c4 e3 71 20 0c 08 04 vpinsrb $4, (%rax,%rcx), %xmm1, %xmm1
377b: c4 e3 79 14 c0 09 vpextrb $9, %xmm0, %eax
3781: c4 e3 71 20 0c 08 05 vpinsrb $5, (%rax,%rcx), %xmm1, %xmm1
3788: c4 e3 79 14 c0 0e vpextrb $14, %xmm0, %eax
378e: c4 e3 71 20 0c 08 06 vpinsrb $6, (%rax,%rcx), %xmm1, %xmm1
3795: c4 e3 79 14 c0 03 vpextrb $3, %xmm0, %eax
379b: c4 e3 71 20 0c 08 07 vpinsrb $7, (%rax,%rcx), %xmm1, %xmm1
37a2: c4 e3 79 14 c0 08 vpextrb $8, %xmm0, %eax
37a8: c4 e3 71 20 0c 08 08 vpinsrb $8, (%rax,%rcx), %xmm1, %xmm1
37af: c4 e3 79 14 c0 0d vpextrb $13, %xmm0, %eax
37b5: c4 e3 71 20 0c 08 09 vpinsrb $9, (%rax,%rcx), %xmm1, %xmm1
37bc: c4 e3 79 14 c0 02 vpextrb $2, %xmm0, %eax
37c2: c4 e3 71 20 0c 08 0a vpinsrb $10, (%rax,%rcx), %xmm1, %xmm1
37c9: c4 e3 79 14 c0 07 vpextrb $7, %xmm0, %eax
37cf: c4 e3 71 20 0c 08 0b vpinsrb $11, (%rax,%rcx), %xmm1, %xmm1
37d6: c4 e3 79 14 c0 0c vpextrb $12, %xmm0, %eax
37dc: c4 e3 71 20 0c 08 0c vpinsrb $12, (%rax,%rcx), %xmm1, %xmm1
37e3: c4 e3 79 14 c0 01 vpextrb $1, %xmm0, %eax
37e9: c4 e3 71 20 0c 08 0d vpinsrb $13, (%rax,%rcx), %xmm1, %xmm1
37f0: c4 e3 79 14 c0 06 vpextrb $6, %xmm0, %eax
37f6: c4 e3 71 20 0c 08 0e vpinsrb $14, (%rax,%rcx), %xmm1, %xmm1
37fd: c4 e3 79 14 c0 0b vpextrb $11, %xmm0, %eax
3803: c4 e3 71 20 04 08 0f vpinsrb $15, (%rax,%rcx), %xmm1, %xmm0
380a: c5 f9 ef 45 80 vpxor -128(%rbp), %xmm0, %xmm0
380f: 48 8b 45 a8 movq -88(%rbp), %rax
3813: c5 fa 7f 00 vmovdqu %xmm0, (%rax)
3817: 48 81 c4 f8 00 00 00 addq $248, %rsp
381e: 5b popq %rbx
381f: 41 5c popq %r12
3821: 41 5d popq %r13
3823: 41 5e popq %r14
3825: 41 5f popq %r15
3827: 5d popq %rbp
3828: c3 retq
#include <stdint.h>

/* The S-box, inverse S-box and round constant tables (see _sbox at 0x3d90 in
   the disassembly above) are assumed to be defined elsewhere in the original
   source and are omitted from this excerpt. */
extern const uint8_t sbox[256];
extern const uint8_t inv_sbox[256];
extern const uint8_t rcon[11];

static inline uint8_t xtime(uint8_t x) {
    return (uint8_t)((x << 1) ^ (((x >> 7) & 1) * 0x1b));
}

static inline uint8_t gmul(uint8_t a, uint8_t b) {
    uint8_t p = 0;
    for (int i = 0; i < 8; i++) {
        if (b & 1)
            p ^= a;
        uint8_t hi = a & 0x80;
        a <<= 1;
        if (hi)
            a ^= 0x1b;
        b >>= 1;
    }
    return p;
}
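
/*
 * Minimal sanity-check sketch: xtime() is multiplication by 2 in GF(2^8)
 * modulo x^8 + x^4 + x^3 + x + 1, so it must agree with gmul(x, 2) for every
 * byte, and 3*x used by mix_columns() is xtime(x) ^ x. The guard macro name
 * is illustrative only; assumes assert() from <assert.h>.
 */
#ifdef XTIME_SELFTEST
#include <assert.h>
static void xtime_selftest(void) {
    for (int x = 0; x < 256; x++) {
        assert(xtime((uint8_t)x) == gmul((uint8_t)x, 2));
        assert((uint8_t)(xtime((uint8_t)x) ^ x) == gmul((uint8_t)x, 3));
    }
}
#endif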
static void key_expansion(const uint8_t key[16], uint8_t round_keys[176]) {
    /* Copy original key */
    for (int i = 0; i < 16; i++)
        round_keys[i] = key[i];
    for (int i = 4; i < 44; i++) {
        uint8_t temp[4];
        temp[0] = round_keys[(i - 1) * 4 + 0];
        temp[1] = round_keys[(i - 1) * 4 + 1];
        temp[2] = round_keys[(i - 1) * 4 + 2];
        temp[3] = round_keys[(i - 1) * 4 + 3];
        if (i % 4 == 0) {
            /* RotWord + SubWord + Rcon */
            uint8_t t = temp[0];
            temp[0] = sbox[temp[1]] ^ rcon[i / 4];
            temp[1] = sbox[temp[2]];
            temp[2] = sbox[temp[3]];
            temp[3] = sbox[t];
        }
        round_keys[i * 4 + 0] = round_keys[(i - 4) * 4 + 0] ^ temp[0];
        round_keys[i * 4 + 1] = round_keys[(i - 4) * 4 + 1] ^ temp[1];
        round_keys[i * 4 + 2] = round_keys[(i - 4) * 4 + 2] ^ temp[2];
        round_keys[i * 4 + 3] = round_keys[(i - 4) * 4 + 3] ^ temp[3];
    }
}
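
/*
 * Note on rcon indexing: key_expansion() reads rcon[i / 4] for i = 4..43,
 * i.e. entries 1 through 10 (entry 0 is unused). The standard AES-128 round
 * constants those entries hold are:
 *
 *   rcon[1..10] = 01 02 04 08 10 20 40 80 1b 36
 */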
static void sub_bytes(uint8_t state[16]) {
    for (int i = 0; i < 16; i++)
        state[i] = sbox[state[i]];
}

static void shift_rows(uint8_t state[16]) {
    uint8_t t;
    /* Row 1: shift left by 1 */
    t = state[1]; state[1] = state[5]; state[5] = state[9];
    state[9] = state[13]; state[13] = t;
    /* Row 2: shift left by 2 */
    t = state[2]; state[2] = state[10]; state[10] = t;
    t = state[6]; state[6] = state[14]; state[14] = t;
    /* Row 3: shift left by 3 */
    t = state[3]; state[3] = state[15]; state[15] = state[11];
    state[11] = state[7]; state[7] = t;
}

static void mix_columns(uint8_t state[16]) {
    for (int i = 0; i < 4; i++) {
        int c = i * 4;
        uint8_t a0 = state[c], a1 = state[c + 1];
        uint8_t a2 = state[c + 2], a3 = state[c + 3];
        state[c + 0] = xtime(a0) ^ xtime(a1) ^ a1 ^ a2 ^ a3;
        state[c + 1] = a0 ^ xtime(a1) ^ xtime(a2) ^ a2 ^ a3;
        state[c + 2] = a0 ^ a1 ^ xtime(a2) ^ xtime(a3) ^ a3;
        state[c + 3] = xtime(a0) ^ a0 ^ a1 ^ a2 ^ xtime(a3);
    }
}

static void add_round_key(uint8_t state[16], const uint8_t *rk) {
    for (int i = 0; i < 16; i++)
        state[i] ^= rk[i];
}
static void inv_sub_bytes(uint8_t state[16]) {
    for (int i = 0; i < 16; i++)
        state[i] = inv_sbox[state[i]];
}

static void inv_shift_rows(uint8_t state[16]) {
    uint8_t t;
    /* Row 1: shift right by 1 */
    t = state[13]; state[13] = state[9]; state[9] = state[5];
    state[5] = state[1]; state[1] = t;
    /* Row 2: shift right by 2 */
    t = state[2]; state[2] = state[10]; state[10] = t;
    t = state[6]; state[6] = state[14]; state[14] = t;
    /* Row 3: shift right by 3 */
    t = state[7]; state[7] = state[11]; state[11] = state[15];
    state[15] = state[3]; state[3] = t;
}

static void inv_mix_columns(uint8_t state[16]) {
    for (int i = 0; i < 4; i++) {
        int c = i * 4;
        uint8_t a0 = state[c], a1 = state[c + 1];
        uint8_t a2 = state[c + 2], a3 = state[c + 3];
        state[c + 0] = gmul(a0, 0x0e) ^ gmul(a1, 0x0b) ^ gmul(a2, 0x0d) ^ gmul(a3, 0x09);
        state[c + 1] = gmul(a0, 0x09) ^ gmul(a1, 0x0e) ^ gmul(a2, 0x0b) ^ gmul(a3, 0x0d);
        state[c + 2] = gmul(a0, 0x0d) ^ gmul(a1, 0x09) ^ gmul(a2, 0x0e) ^ gmul(a3, 0x0b);
        state[c + 3] = gmul(a0, 0x0b) ^ gmul(a1, 0x0d) ^ gmul(a2, 0x09) ^ gmul(a3, 0x0e);
    }
}
void aes128_encrypt(uint8_t state[16], const uint8_t key[16]) {
    uint8_t rk[176];
    key_expansion(key, rk);
    add_round_key(state, rk);
    for (int round = 1; round < 10; round++) {
        sub_bytes(state);
        shift_rows(state);
        mix_columns(state);
        add_round_key(state, rk + round * 16);
    }
    /* Final round (no MixColumns) */
    sub_bytes(state);
    shift_rows(state);
    add_round_key(state, rk + 160);
}
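
/*
 * Minimal usage sketch, assuming the sbox/rcon tables are linked in from the
 * rest of the gist: encrypt the FIPS-197 Appendix C.1 test vector
 * (key 000102...0f, plaintext 00112233...ff) and check the expected
 * ciphertext 69c4e0d86a7b0430d8cdb78070b4c55a. The guard macro name is
 * illustrative only.
 */
#ifdef AES128_SELFTEST
#include <stdio.h>
#include <string.h>
int main(void) {
    uint8_t key[16], block[16];
    const uint8_t expected[16] = {
        0x69, 0xc4, 0xe0, 0xd8, 0x6a, 0x7b, 0x04, 0x30,
        0xd8, 0xcd, 0xb7, 0x80, 0x70, 0xb4, 0xc5, 0x5a
    };
    for (int i = 0; i < 16; i++) {
        key[i]   = (uint8_t)i;           /* 000102030405060708090a0b0c0d0e0f */
        block[i] = (uint8_t)(i * 0x11);  /* 00112233445566778899aabbccddeeff */
    }
    aes128_encrypt(block, key);
    printf("%s\n", memcmp(block, expected, 16) == 0 ? "OK" : "FAIL");
    return 0;
}
#endif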
; aes128_encrypt (O3) — AFTER optimization
; Blocks: 4, Native insns: 183, REIL insns: 1276
;
BB 0x3320 (CALL) -> 0x3830
; 0x3320: push rbp (1 bytes)
0x3320.0: SUB R_RSP:U64, 0x8:U64, V_00:U64
0x3320.1: STR V_00:U64, _, R_RSP:U64
0x3320.2: STM R_RBP:U64, _, V_00:U64 ASM_END
; 0x3321: mov rbp, rsp (3 bytes)
0x3321.0: STR R_RSP:U64, _, R_RBP:U64 ASM_END
; 0x3324: push r15 (2 bytes)
0x3324.0: SUB R_RSP:U64, 0x8:U64, V_00:U64
0x3324.1: STR V_00:U64, _, R_RSP:U64
0x3324.2: STM R_R15:U64, _, V_00:U64 ASM_END
; 0x3326: push r14 (2 bytes)
0x3326.0: SUB R_RSP:U64, 0x8:U64, V_00:U64
0x3326.1: STR V_00:U64, _, R_RSP:U64
0x3326.2: STM R_R14:U64, _, V_00:U64 ASM_END
; 0x3328: push r13 (2 bytes)
0x3328.0: SUB R_RSP:U64, 0x8:U64, V_00:U64
0x3328.1: STR V_00:U64, _, R_RSP:U64
0x3328.2: STM R_R13:U64, _, V_00:U64 ASM_END
; 0x332a: push r12 (2 bytes)
0x332a.0: SUB R_RSP:U64, 0x8:U64, V_00:U64
0x332a.1: STR V_00:U64, _, R_RSP:U64
0x332a.2: STM R_R12:U64, _, V_00:U64 ASM_END
; 0x332c: push rbx (1 bytes)
0x332c.0: SUB R_RSP:U64, 0x8:U64, V_00:U64
0x332c.1: STR V_00:U64, _, R_RSP:U64
0x332c.2: STM R_RBX:U64, _, V_00:U64 ASM_END
; 0x332d: sub rsp, 0xf8 (7 bytes)
0x332d.0: SUB R_RSP:U64, 0xf8:U64, V_00:U64
0x332d.25: STR V_00:U64, _, R_RSP:U64 ASM_END
; 0x3337: mov rbx, rdi (3 bytes)
0x3337.0: STR R_RDI:U64, _, R_RBX:U64 ASM_END
; 0x3344: call 0x3830 (5 bytes)
0x3344.0: SUB R_RSP:U64, 0x8:U64, V_00:U64
0x3344.1: STR V_00:U64, _, R_RSP:U64
0x3344.2: STM 0x3349:U64, _, V_00:U64
0x3344.3: JCC 0x1:U1, _, 0x3830:U64 ASM_END BB_END CALL
BB 0x3349 (FALL_THROUGH)
; 0x3349: mov qword ptr [rbp - 0x58], rbx (4 bytes)
0x3349.0: ADD R_RBP:U64, 0xffffffffffffffa8:U64, V_00:U64
0x3349.1: STM R_RBX:U64, _, V_00:U64 ASM_END
; 0x334d: vmovdqu xmm0, xmmword ptr [rbx] (4 bytes)
0x334d.0: LDM R_RBX:U64, _, V_00:U128
0x334d.1: OR V_00:U128, 0x0:U512, V_01:U128
0x334d.2: OR V_01:U128, 0x0:U128, R_ZMM0:U512 ASM_END
; 0x3351: vpxor xmm0, xmm0, xmmword ptr [rbp - 0x120] (8 bytes)
0x3351.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x3351.1: ADD R_RBP:U64, 0xfffffffffffffee0:U64, V_01:U64
0x3351.2: LDM V_01:U64, _, V_02:U128
0x3351.3: XOR V_00:U128, V_02:U128, V_03:U128
0x3351.4: OR V_03:U128, 0x0:U512, V_04:U128
0x3351.5: OR V_04:U128, 0x0:U128, R_ZMM0:U512 ASM_END
; 0x3359: mov eax, 0x10 (5 bytes)
0x3359.1: STR 0x10:U64, _, R_RAX:U64 ASM_END
; 0x335e: vmovdqa xmm1, xmmword ptr [rip + 0x9ba] (8 bytes)
0x335e.0: LDM 0x3d20:U64, _, V_00:U128
0x335e.1: OR V_00:U128, 0x0:U512, V_01:U128
0x335e.2: OR V_01:U128, 0x0:U128, R_ZMM1:U512 ASM_END
; 0x3366: vmovdqa xmm2, xmmword ptr [rip + 0x9c2] (8 bytes)
0x3366.0: LDM 0x3d30:U64, _, V_00:U128
0x3366.1: OR V_00:U128, 0x0:U512, V_01:U128
0x3366.2: OR V_01:U128, 0x0:U128, R_ZMM2:U512 ASM_END
; 0x336e: nop (2 bytes)
0x336e.0: NONE _, _, _ ASM_END
BB 0x3370 (BRANCH) -> 0x3370
; 0x3370: mov qword ptr [rbp - 0x70], rax (4 bytes)
0x3370.0: ADD R_RBP:U64, 0xffffffffffffff90:U64, V_00:U64
0x3370.1: STM R_RAX:U64, _, V_00:U64 ASM_END
; 0x3374: vpextrb eax, xmm0, 0 (6 bytes)
0x3374.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x3374.1: OR V_00:U128, 0x0:U128, V_01:U8
0x3374.2: OR V_01:U8, 0x0:U8, V_02:U32
0x3374.3: OR V_02:U32, 0x0:U64, V_03:U32
0x3374.4: OR V_03:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x3381: movzx r13d, byte ptr [rax + r14] (5 bytes)
0x3381.0: ADD R_RAX:U64, 0x3d90:U64, V_00:U64
0x3381.1: LDM V_00:U64, _, V_01:U8
0x3381.2: OR V_01:U8, 0x0:U8, V_02:U32
0x3381.3: OR V_02:U32, 0x0:U64, V_03:U32
0x3381.4: OR V_03:U32, 0x0:U32, R_R13:U64 ASM_END
; 0x3386: vpextrb eax, xmm0, 2 (6 bytes)
0x3386.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x3386.1: SHR V_00:U128, 0x10:U8, V_01:U128
0x3386.2: OR V_01:U128, 0x0:U128, V_02:U8
0x3386.3: OR V_02:U8, 0x0:U8, V_03:U32
0x3386.4: OR V_03:U32, 0x0:U64, V_04:U32
0x3386.5: OR V_04:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x338c: vpextrb ecx, xmm0, 3 (6 bytes)
0x338c.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x338c.1: SHR V_00:U128, 0x18:U8, V_01:U128
0x338c.2: OR V_01:U128, 0x0:U128, V_02:U8
0x338c.3: OR V_02:U8, 0x0:U8, V_03:U32
0x338c.4: OR V_03:U32, 0x0:U64, V_04:U32
0x338c.5: OR V_04:U32, 0x0:U32, R_RCX:U64 ASM_END
; 0x3392: movzx eax, byte ptr [rax + r14] (5 bytes)
0x3392.0: ADD R_RAX:U64, 0x3d90:U64, V_00:U64
0x3392.1: LDM V_00:U64, _, V_01:U8
0x3392.2: OR V_01:U8, 0x0:U8, V_02:U32
0x3392.3: OR V_02:U32, 0x0:U64, V_03:U32
0x3392.4: OR V_03:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x3397: mov qword ptr [rbp - 0x60], rax (4 bytes)
0x3397.0: ADD R_RBP:U64, 0xffffffffffffffa0:U64, V_00:U64
0x3397.1: STM R_RAX:U64, _, V_00:U64 ASM_END
; 0x339b: movzx eax, byte ptr [rcx + r14] (5 bytes)
0x339b.0: ADD R_RCX:U64, 0x3d90:U64, V_00:U64
0x339b.1: LDM V_00:U64, _, V_01:U8
0x339b.2: OR V_01:U8, 0x0:U8, V_02:U32
0x339b.3: OR V_02:U32, 0x0:U64, V_03:U32
0x339b.4: OR V_03:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x33a0: mov qword ptr [rbp - 0x48], rax (4 bytes)
0x33a0.0: ADD R_RBP:U64, 0xffffffffffffffb8:U64, V_00:U64
0x33a0.1: STM R_RAX:U64, _, V_00:U64 ASM_END
; 0x33a4: vpextrb eax, xmm0, 4 (6 bytes)
0x33a4.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x33a4.1: SHR V_00:U128, 0x20:U8, V_01:U128
0x33a4.2: OR V_01:U128, 0x0:U128, V_02:U8
0x33a4.3: OR V_02:U8, 0x0:U8, V_03:U32
0x33a4.4: OR V_03:U32, 0x0:U64, V_04:U32
0x33a4.5: OR V_04:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x33aa: vpextrb ecx, xmm0, 5 (6 bytes)
0x33aa.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x33aa.1: SHR V_00:U128, 0x28:U8, V_01:U128
0x33aa.2: OR V_01:U128, 0x0:U128, V_02:U8
0x33aa.3: OR V_02:U8, 0x0:U8, V_03:U32
0x33aa.4: OR V_03:U32, 0x0:U64, V_04:U32
0x33aa.5: OR V_04:U32, 0x0:U32, R_RCX:U64 ASM_END
; 0x33b0: movzx r10d, byte ptr [rax + r14] (5 bytes)
0x33b0.0: ADD R_RAX:U64, 0x3d90:U64, V_00:U64
0x33b0.1: LDM V_00:U64, _, V_01:U8
0x33b0.2: OR V_01:U8, 0x0:U8, V_02:U32
0x33b0.3: OR V_02:U32, 0x0:U64, V_03:U32
0x33b0.4: OR V_03:U32, 0x0:U32, R_R10:U64 ASM_END
; 0x33b5: movzx r12d, byte ptr [rcx + r14] (5 bytes)
0x33b5.0: ADD R_RCX:U64, 0x3d90:U64, V_00:U64
0x33b5.1: LDM V_00:U64, _, V_01:U8
0x33b5.2: OR V_01:U8, 0x0:U8, V_02:U32
0x33b5.3: OR V_02:U32, 0x0:U64, V_03:U32
0x33b5.4: OR V_03:U32, 0x0:U32, R_R12:U64 ASM_END
; 0x33ba: vpextrb eax, xmm0, 7 (6 bytes)
0x33ba.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x33ba.1: SHR V_00:U128, 0x38:U8, V_01:U128
0x33ba.2: OR V_01:U128, 0x0:U128, V_02:U8
0x33ba.3: OR V_02:U8, 0x0:U8, V_03:U32
0x33ba.4: OR V_03:U32, 0x0:U64, V_04:U32
0x33ba.5: OR V_04:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x33c0: vpextrb ecx, xmm0, 8 (6 bytes)
0x33c0.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x33c0.1: SHR V_00:U128, 0x40:U8, V_01:U128
0x33c0.2: OR V_01:U128, 0x0:U128, V_02:U8
0x33c0.3: OR V_02:U8, 0x0:U8, V_03:U32
0x33c0.4: OR V_03:U32, 0x0:U64, V_04:U32
0x33c0.5: OR V_04:U32, 0x0:U32, R_RCX:U64 ASM_END
; 0x33c6: movzx r9d, byte ptr [rax + r14] (5 bytes)
0x33c6.0: ADD R_RAX:U64, 0x3d90:U64, V_00:U64
0x33c6.1: LDM V_00:U64, _, V_01:U8
0x33c6.2: OR V_01:U8, 0x0:U8, V_02:U32
0x33c6.3: OR V_02:U32, 0x0:U64, V_03:U32
0x33c6.4: OR V_03:U32, 0x0:U32, R_R9:U64 ASM_END
; 0x33cb: movzx r8d, byte ptr [rcx + r14] (5 bytes)
0x33cb.0: ADD R_RCX:U64, 0x3d90:U64, V_00:U64
0x33cb.1: LDM V_00:U64, _, V_01:U8
0x33cb.2: OR V_01:U8, 0x0:U8, V_02:U32
0x33cb.3: OR V_02:U32, 0x0:U64, V_03:U32
0x33cb.4: OR V_03:U32, 0x0:U32, R_R8:U64 ASM_END
; 0x33d0: vpextrb eax, xmm0, 0xa (6 bytes)
0x33d0.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x33d0.1: SHR V_00:U128, 0x50:U8, V_01:U128
0x33d0.2: OR V_01:U128, 0x0:U128, V_02:U8
0x33d0.3: OR V_02:U8, 0x0:U8, V_03:U32
0x33d0.4: OR V_03:U32, 0x0:U64, V_04:U32
0x33d0.5: OR V_04:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x33d6: vpextrb edx, xmm0, 0xf (6 bytes)
0x33d6.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x33d6.1: SHR V_00:U128, 0x78:U8, V_01:U128
0x33d6.2: OR V_01:U128, 0x0:U128, V_02:U8
0x33d6.3: OR V_02:U8, 0x0:U8, V_03:U32
0x33d6.4: OR V_03:U32, 0x0:U64, V_04:U32
0x33d6.5: OR V_04:U32, 0x0:U32, R_RDX:U64 ASM_END
; 0x33dc: movzx ecx, byte ptr [rax + r14] (5 bytes)
0x33dc.0: ADD R_RAX:U64, 0x3d90:U64, V_00:U64
0x33dc.1: LDM V_00:U64, _, V_01:U8
0x33dc.2: OR V_01:U8, 0x0:U8, V_02:U32
0x33dc.3: OR V_02:U32, 0x0:U64, V_03:U32
0x33dc.4: OR V_03:U32, 0x0:U32, R_RCX:U64 ASM_END
; 0x33e1: movzx edi, byte ptr [rdx + r14] (5 bytes)
0x33e1.0: ADD R_RDX:U64, 0x3d90:U64, V_00:U64
0x33e1.1: LDM V_00:U64, _, V_01:U8
0x33e1.2: OR V_01:U8, 0x0:U8, V_02:U32
0x33e1.3: OR V_02:U32, 0x0:U64, V_03:U32
0x33e1.4: OR V_03:U32, 0x0:U32, R_RDI:U64 ASM_END
; 0x33e6: lea eax, [r13 + r13] (5 bytes)
0x33e6.0: ADD R_R13:U64, R_R13:U64, V_00:U64
0x33e6.1: OR V_00:U64, 0x0:U64, V_01:U32
0x33e6.2: OR V_01:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x33eb: mov edx, r13d (3 bytes)
0x33eb.0: AND R_R13:U64, 0xffffffff:U64, V_00:U32
0x33eb.1: OR V_00:U32, 0x0:U64, V_01:U32
0x33eb.2: OR V_01:U32, 0x0:U32, R_RDX:U64 ASM_END
; 0x33ee: sar dl, 7 (3 bytes)
0x33ee.0: AND R_RDX:U64, 0xff:U64, V_00:U8
0x33ee.2: SAR V_00:U8, 0x7:U8, V_02:U8
0x33ee.22: AND R_RDX:U64, 0xffffffffffffff00:U64, V_16:U64
0x33ee.23: OR V_02:U8, 0x0:U8, V_17:U64
0x33ee.24: OR V_16:U64, V_17:U64, R_RDX:U64 ASM_END
; 0x33f1: and dl, 0x1b (3 bytes)
0x33f1.0: AND R_RDX:U64, 0xff:U64, V_00:U8
0x33f1.1: AND V_00:U8, 0x1b:U8, V_01:U8
0x33f1.18: AND R_RDX:U64, 0xffffffffffffff00:U64, V_12:U64
0x33f1.19: OR V_01:U8, 0x0:U8, V_13:U64
0x33f1.20: OR V_12:U64, V_13:U64, R_RDX:U64 ASM_END
; 0x33f4: xor dl, al (2 bytes)
0x33f4.0: AND R_RDX:U64, 0xff:U64, V_00:U8
0x33f4.1: AND R_RAX:U64, 0xff:U64, V_01:U8
0x33f4.2: XOR V_00:U8, V_01:U8, V_02:U8
0x33f4.19: AND R_RDX:U64, 0xffffffffffffff00:U64, V_13:U64
0x33f4.20: OR V_02:U8, 0x0:U8, V_14:U64
0x33f4.21: OR V_13:U64, V_14:U64, R_RDX:U64 ASM_END
; 0x33f6: lea eax, [r12 + r12] (4 bytes)
0x33f6.0: ADD R_R12:U64, R_R12:U64, V_00:U64
0x33f6.1: OR V_00:U64, 0x0:U64, V_01:U32
0x33f6.2: OR V_01:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x33fa: mov ebx, r12d (3 bytes)
0x33fa.0: AND R_R12:U64, 0xffffffff:U64, V_00:U32
0x33fa.1: OR V_00:U32, 0x0:U64, V_01:U32
0x33fa.2: OR V_01:U32, 0x0:U32, R_RBX:U64 ASM_END
; 0x33fd: sar bl, 7 (3 bytes)
0x33fd.0: AND R_RBX:U64, 0xff:U64, V_00:U8
0x33fd.2: SAR V_00:U8, 0x7:U8, V_02:U8
0x33fd.22: AND R_RBX:U64, 0xffffffffffffff00:U64, V_16:U64
0x33fd.23: OR V_02:U8, 0x0:U8, V_17:U64
0x33fd.24: OR V_16:U64, V_17:U64, R_RBX:U64 ASM_END
; 0x3400: and bl, 0x1b (3 bytes)
0x3400.0: AND R_RBX:U64, 0xff:U64, V_00:U8
0x3400.1: AND V_00:U8, 0x1b:U8, V_01:U8
0x3400.18: AND R_RBX:U64, 0xffffffffffffff00:U64, V_12:U64
0x3400.19: OR V_01:U8, 0x0:U8, V_13:U64
0x3400.20: OR V_12:U64, V_13:U64, R_RBX:U64 ASM_END
; 0x3403: xor al, cl (2 bytes)
0x3403.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x3403.1: AND R_RCX:U64, 0xff:U64, V_01:U8
0x3403.2: XOR V_00:U8, V_01:U8, V_02:U8
0x3403.19: AND R_RAX:U64, 0xffffffffffffff00:U64, V_13:U64
0x3403.20: OR V_02:U8, 0x0:U8, V_14:U64
0x3403.21: OR V_13:U64, V_14:U64, R_RAX:U64 ASM_END
; 0x3405: xor al, bl (2 bytes)
0x3405.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x3405.1: AND R_RBX:U64, 0xff:U64, V_01:U8
0x3405.2: XOR V_00:U8, V_01:U8, V_02:U8
0x3405.19: AND R_RAX:U64, 0xffffffffffffff00:U64, V_13:U64
0x3405.20: OR V_02:U8, 0x0:U8, V_14:U64
0x3405.21: OR V_13:U64, V_14:U64, R_RAX:U64 ASM_END
; 0x3407: mov ebx, eax (2 bytes)
0x3407.0: AND R_RAX:U64, 0xffffffff:U64, V_00:U32
0x3407.1: OR V_00:U32, 0x0:U64, V_01:U32
0x3407.2: OR V_01:U32, 0x0:U32, R_RBX:U64 ASM_END
; 0x3409: xor bl, dl (2 bytes)
0x3409.0: AND R_RBX:U64, 0xff:U64, V_00:U8
0x3409.1: AND R_RDX:U64, 0xff:U64, V_01:U8
0x3409.2: XOR V_00:U8, V_01:U8, V_02:U8
0x3409.19: AND R_RBX:U64, 0xffffffffffffff00:U64, V_13:U64
0x3409.20: OR V_02:U8, 0x0:U8, V_14:U64
0x3409.21: OR V_13:U64, V_14:U64, R_RBX:U64 ASM_END
; 0x340b: mov byte ptr [rbp - 0x40], bl (3 bytes)
0x340b.0: AND R_RBX:U64, 0xff:U64, V_00:U8
0x340b.1: ADD R_RBP:U64, 0xffffffffffffffc0:U64, V_01:U64
0x340b.2: STM V_00:U8, _, V_01:U64 ASM_END
; 0x340e: lea ebx, [rcx + rcx] (3 bytes)
0x340e.0: ADD R_RCX:U64, R_RCX:U64, V_00:U64
0x340e.1: OR V_00:U64, 0x0:U64, V_01:U32
0x340e.2: OR V_01:U32, 0x0:U32, R_RBX:U64 ASM_END
; 0x3411: xor dl, cl (2 bytes)
0x3411.0: AND R_RDX:U64, 0xff:U64, V_00:U8
0x3411.1: AND R_RCX:U64, 0xff:U64, V_01:U8
0x3411.2: XOR V_00:U8, V_01:U8, V_02:U8
0x3411.19: AND R_RDX:U64, 0xffffffffffffff00:U64, V_13:U64
0x3411.20: OR V_02:U8, 0x0:U8, V_14:U64
0x3411.21: OR V_13:U64, V_14:U64, R_RDX:U64 ASM_END
; 0x3413: mov byte ptr [rbp - 0x2d], dl (3 bytes)
0x3413.0: AND R_RDX:U64, 0xff:U64, V_00:U8
0x3413.1: ADD R_RBP:U64, 0xffffffffffffffd3:U64, V_01:U64
0x3413.2: STM V_00:U8, _, V_01:U64 ASM_END
; 0x3416: sar cl, 7 (3 bytes)
0x3416.0: AND R_RCX:U64, 0xff:U64, V_00:U8
0x3416.2: SAR V_00:U8, 0x7:U8, V_02:U8
0x3416.22: AND R_RCX:U64, 0xffffffffffffff00:U64, V_16:U64
0x3416.23: OR V_02:U8, 0x0:U8, V_17:U64
0x3416.24: OR V_16:U64, V_17:U64, R_RCX:U64 ASM_END
; 0x3419: and cl, 0x1b (3 bytes)
0x3419.0: AND R_RCX:U64, 0xff:U64, V_00:U8
0x3419.1: AND V_00:U8, 0x1b:U8, V_01:U8
0x3419.18: AND R_RCX:U64, 0xffffffffffffff00:U64, V_12:U64
0x3419.19: OR V_01:U8, 0x0:U8, V_13:U64
0x3419.20: OR V_12:U64, V_13:U64, R_RCX:U64 ASM_END
; 0x341c: xor cl, bl (2 bytes)
0x341c.0: AND R_RCX:U64, 0xff:U64, V_00:U8
0x341c.1: AND R_RBX:U64, 0xff:U64, V_01:U8
0x341c.2: XOR V_00:U8, V_01:U8, V_02:U8
0x341c.19: AND R_RCX:U64, 0xffffffffffffff00:U64, V_13:U64
0x341c.20: OR V_02:U8, 0x0:U8, V_14:U64
0x341c.21: OR V_13:U64, V_14:U64, R_RCX:U64 ASM_END
; 0x3420: mov byte ptr [rbp - 0x2b], cl (3 bytes)
0x3420.0: AND R_RCX:U64, 0xff:U64, V_00:U8
0x3420.1: ADD R_RBP:U64, 0xffffffffffffffd5:U64, V_01:U64
0x3420.2: STM V_00:U8, _, V_01:U64 ASM_END
; 0x3426: mov qword ptr [rbp - 0x68], rdi (4 bytes)
0x3426.0: ADD R_RBP:U64, 0xffffffffffffff98:U64, V_00:U64
0x3426.1: STM R_RDI:U64, _, V_00:U64 ASM_END
; 0x342a: vmovd xmm3, edi (4 bytes)
0x342a.0: AND R_RDI:U64, 0xffffffff:U64, V_00:U32
0x342a.1: OR V_00:U32, 0x0:U32, V_01:U32
0x342a.2: OR V_01:U32, 0x0:U32, V_02:U128
0x342a.3: OR V_02:U128, 0x0:U512, V_03:U128
0x342a.4: OR V_03:U128, 0x0:U128, R_ZMM3:U512 ASM_END
; 0x3438: lea ebx, [r10 + r10] (4 bytes)
0x3438.0: ADD R_R10:U64, R_R10:U64, V_00:U64
0x3438.1: OR V_00:U64, 0x0:U64, V_01:U32
0x3438.2: OR V_01:U32, 0x0:U32, R_RBX:U64 ASM_END
; 0x343c: mov r15d, r10d (3 bytes)
0x343c.0: AND R_R10:U64, 0xffffffff:U64, V_00:U32
0x343c.1: OR V_00:U32, 0x0:U64, V_01:U32
0x343c.2: OR V_01:U32, 0x0:U32, R_R15:U64 ASM_END
; 0x343f: sar r15b, 7 (4 bytes)
0x343f.0: AND R_R15:U64, 0xff:U64, V_00:U8
0x343f.2: SAR V_00:U8, 0x7:U8, V_02:U8
0x343f.22: AND R_R15:U64, 0xffffffffffffff00:U64, V_16:U64
0x343f.23: OR V_02:U8, 0x0:U8, V_17:U64
0x343f.24: OR V_16:U64, V_17:U64, R_R15:U64 ASM_END
; 0x3443: and r15b, 0x1b (4 bytes)
0x3443.0: AND R_R15:U64, 0xff:U64, V_00:U8
0x3443.1: AND V_00:U8, 0x1b:U8, V_01:U8
0x3443.18: AND R_R15:U64, 0xffffffffffffff00:U64, V_12:U64
0x3443.19: OR V_01:U8, 0x0:U8, V_13:U64
0x3443.20: OR V_12:U64, V_13:U64, R_R15:U64 ASM_END
; 0x3447: xor r15b, bl (3 bytes)
0x3447.0: AND R_R15:U64, 0xff:U64, V_00:U8
0x3447.1: AND R_RBX:U64, 0xff:U64, V_01:U8
0x3447.2: XOR V_00:U8, V_01:U8, V_02:U8
0x3447.19: AND R_R15:U64, 0xffffffffffffff00:U64, V_13:U64
0x3447.20: OR V_02:U8, 0x0:U8, V_14:U64
0x3447.21: OR V_13:U64, V_14:U64, R_R15:U64 ASM_END
; 0x344a: vpextrb ebx, xmm0, 9 (6 bytes)
0x344a.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x344a.1: SHR V_00:U128, 0x48:U8, V_01:U128
0x344a.2: OR V_01:U128, 0x0:U128, V_02:U8
0x344a.3: OR V_02:U8, 0x0:U8, V_03:U32
0x344a.4: OR V_03:U32, 0x0:U64, V_04:U32
0x344a.5: OR V_04:U32, 0x0:U32, R_RBX:U64 ASM_END
; 0x3450: xor al, r13b (3 bytes)
0x3450.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x3450.1: AND R_R13:U64, 0xff:U64, V_01:U8
0x3450.2: XOR V_00:U8, V_01:U8, V_02:U8
0x3450.19: AND R_RAX:U64, 0xffffffffffffff00:U64, V_13:U64
0x3450.20: OR V_02:U8, 0x0:U8, V_14:U64
0x3450.21: OR V_13:U64, V_14:U64, R_RAX:U64 ASM_END
; 0x3453: mov dword ptr [rbp - 0x50], eax (3 bytes)
0x3453.0: AND R_RAX:U64, 0xffffffff:U64, V_00:U32
0x3453.1: ADD R_RBP:U64, 0xffffffffffffffb0:U64, V_01:U64
0x3453.2: STM V_00:U32, _, V_01:U64 ASM_END
; 0x3459: vmovd xmm4, r12d (5 bytes)
0x3459.0: AND R_R12:U64, 0xffffffff:U64, V_00:U32
0x3459.1: OR V_00:U32, 0x0:U32, V_01:U32
0x3459.2: OR V_01:U32, 0x0:U32, V_02:U128
0x3459.3: OR V_02:U128, 0x0:U512, V_03:U128
0x3459.4: OR V_03:U128, 0x0:U128, R_ZMM4:U512 ASM_END
; 0x3467: vpextrb edi, xmm0, 0xd (6 bytes)
0x3467.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x3467.1: SHR V_00:U128, 0x68:U8, V_01:U128
0x3467.2: OR V_01:U128, 0x0:U128, V_02:U8
0x3467.3: OR V_02:U8, 0x0:U8, V_03:U32
0x3467.4: OR V_03:U32, 0x0:U64, V_04:U32
0x3467.5: OR V_04:U32, 0x0:U32, R_RDI:U64 ASM_END
; 0x346d: movzx ebx, byte ptr [rbx + r14] (5 bytes)
0x346d.0: ADD R_RBX:U64, 0x3d90:U64, V_00:U64
0x346d.1: LDM V_00:U64, _, V_01:U8
0x346d.2: OR V_01:U8, 0x0:U8, V_02:U32
0x346d.3: OR V_02:U32, 0x0:U64, V_03:U32
0x346d.4: OR V_03:U32, 0x0:U32, R_RBX:U64 ASM_END
; 0x347c: vpextrb eax, xmm0, 0xe (6 bytes)
0x347c.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x347c.1: SHR V_00:U128, 0x70:U8, V_01:U128
0x347c.2: OR V_01:U128, 0x0:U128, V_02:U8
0x347c.3: OR V_02:U8, 0x0:U8, V_03:U32
0x347c.4: OR V_03:U32, 0x0:U64, V_04:U32
0x347c.5: OR V_04:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x3482: movzx edi, byte ptr [rdi + r14] (5 bytes)
0x3482.0: ADD R_RDI:U64, 0x3d90:U64, V_00:U64
0x3482.1: LDM V_00:U64, _, V_01:U8
0x3482.2: OR V_01:U8, 0x0:U8, V_02:U32
0x3482.3: OR V_02:U32, 0x0:U64, V_03:U32
0x3482.4: OR V_03:U32, 0x0:U32, R_RDI:U64 ASM_END
; 0x3487: movzx eax, byte ptr [rax + r14] (5 bytes)
0x3487.0: ADD R_RAX:U64, 0x3d90:U64, V_00:U64
0x3487.1: LDM V_00:U64, _, V_01:U8
0x3487.2: OR V_01:U8, 0x0:U8, V_02:U32
0x3487.3: OR V_02:U32, 0x0:U64, V_03:U32
0x3487.4: OR V_03:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x3495: lea ecx, [rax + rax] (3 bytes)
0x3495.0: ADD R_RAX:U64, R_RAX:U64, V_00:U64
0x3495.1: OR V_00:U64, 0x0:U64, V_01:U32
0x3495.2: OR V_01:U32, 0x0:U32, R_RCX:U64 ASM_END
; 0x34aa: sar al, 7 (3 bytes)
0x34aa.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x34aa.2: SAR V_00:U8, 0x7:U8, V_02:U8
0x34aa.22: AND R_RAX:U64, 0xffffffffffffff00:U64, V_16:U64
0x34aa.23: OR V_02:U8, 0x0:U8, V_17:U64
0x34aa.24: OR V_16:U64, V_17:U64, R_RAX:U64 ASM_END
; 0x34ad: and al, 0x1b (2 bytes)
0x34ad.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x34ad.1: AND V_00:U8, 0x1b:U8, V_01:U8
0x34ad.18: AND R_RAX:U64, 0xffffffffffffff00:U64, V_12:U64
0x34ad.19: OR V_01:U8, 0x0:U8, V_13:U64
0x34ad.20: OR V_12:U64, V_13:U64, R_RAX:U64 ASM_END
; 0x34af: xor al, cl (2 bytes)
0x34af.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x34af.1: AND R_RCX:U64, 0xff:U64, V_01:U8
0x34af.2: XOR V_00:U8, V_01:U8, V_02:U8
0x34af.19: AND R_RAX:U64, 0xffffffffffffff00:U64, V_13:U64
0x34af.20: OR V_02:U8, 0x0:U8, V_14:U64
0x34af.21: OR V_13:U64, V_14:U64, R_RAX:U64 ASM_END
; 0x34b1: mov byte ptr [rbp - 0x2c], al (3 bytes)
0x34b1.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x34b1.1: ADD R_RBP:U64, 0xffffffffffffffd4:U64, V_01:U64
0x34b1.2: STM V_00:U8, _, V_01:U64 ASM_END
; 0x34b4: lea r11d, [rbx + rbx] (4 bytes)
0x34b4.0: ADD R_RBX:U64, R_RBX:U64, V_00:U64
0x34b4.1: OR V_00:U64, 0x0:U64, V_01:U32
0x34b4.2: OR V_01:U32, 0x0:U32, R_R11:U64 ASM_END
; 0x34b8: mov eax, ebx (2 bytes)
0x34b8.0: AND R_RBX:U64, 0xffffffff:U64, V_00:U32
0x34b8.1: OR V_00:U32, 0x0:U64, V_01:U32
0x34b8.2: OR V_01:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x34ba: sar al, 7 (3 bytes)
0x34ba.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x34ba.2: SAR V_00:U8, 0x7:U8, V_02:U8
0x34ba.22: AND R_RAX:U64, 0xffffffffffffff00:U64, V_16:U64
0x34ba.23: OR V_02:U8, 0x0:U8, V_17:U64
0x34ba.24: OR V_16:U64, V_17:U64, R_RAX:U64 ASM_END
; 0x34bd: and al, 0x1b (2 bytes)
0x34bd.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x34bd.1: AND V_00:U8, 0x1b:U8, V_01:U8
0x34bd.18: AND R_RAX:U64, 0xffffffffffffff00:U64, V_12:U64
0x34bd.19: OR V_01:U8, 0x0:U8, V_13:U64
0x34bd.20: OR V_12:U64, V_13:U64, R_RAX:U64 ASM_END
; 0x34bf: mov rcx, qword ptr [rbp - 0x48] (4 bytes)
0x34bf.0: ADD R_RBP:U64, 0xffffffffffffffb8:U64, V_00:U64
0x34bf.1: LDM V_00:U64, _, V_01:U64
0x34bf.2: STR V_01:U64, _, R_RCX:U64 ASM_END
; 0x34c3: xor r11b, cl (3 bytes)
0x34c3.0: AND R_R11:U64, 0xff:U64, V_00:U8
0x34c3.1: AND R_RCX:U64, 0xff:U64, V_01:U8
0x34c3.2: XOR V_00:U8, V_01:U8, V_02:U8
0x34c3.19: AND R_R11:U64, 0xffffffffffffff00:U64, V_13:U64
0x34c3.20: OR V_02:U8, 0x0:U8, V_14:U64
0x34c3.21: OR V_13:U64, V_14:U64, R_R11:U64 ASM_END
; 0x34c6: xor r11b, al (3 bytes)
0x34c6.0: AND R_R11:U64, 0xff:U64, V_00:U8
0x34c6.1: AND R_RAX:U64, 0xff:U64, V_01:U8
0x34c6.2: XOR V_00:U8, V_01:U8, V_02:U8
0x34c6.19: AND R_R11:U64, 0xffffffffffffff00:U64, V_13:U64
0x34c6.20: OR V_02:U8, 0x0:U8, V_14:U64
0x34c6.21: OR V_13:U64, V_14:U64, R_R11:U64 ASM_END
; 0x34cc: mov byte ptr [rbp - 0x34], r11b (4 bytes)
0x34cc.0: AND R_R11:U64, 0xff:U64, V_00:U8
0x34cc.1: ADD R_RBP:U64, 0xffffffffffffffcc:U64, V_01:U64
0x34cc.2: STM V_00:U8, _, V_01:U64 ASM_END
; 0x34f0: lea r10d, [r8 + r8] (4 bytes)
0x34f0.0: ADD R_R8:U64, R_R8:U64, V_00:U64
0x34f0.1: OR V_00:U64, 0x0:U64, V_01:U32
0x34f0.2: OR V_01:U32, 0x0:U32, R_R10:U64 ASM_END
; 0x34f4: mov eax, edi (2 bytes)
0x34f4.0: AND R_RDI:U64, 0xffffffff:U64, V_00:U32
0x34f4.1: OR V_00:U32, 0x0:U64, V_01:U32
0x34f4.2: OR V_01:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x34f6: xor al, r8b (3 bytes)
0x34f6.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x34f6.1: AND R_R8:U64, 0xff:U64, V_01:U8
0x34f6.2: XOR V_00:U8, V_01:U8, V_02:U8
0x34f6.19: AND R_RAX:U64, 0xffffffffffffff00:U64, V_13:U64
0x34f6.20: OR V_02:U8, 0x0:U8, V_14:U64
0x34f6.21: OR V_13:U64, V_14:U64, R_RAX:U64 ASM_END
; 0x34f9: mov byte ptr [rbp - 0x48], al (3 bytes)
0x34f9.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x34f9.1: ADD R_RBP:U64, 0xffffffffffffffb8:U64, V_01:U64
0x34f9.2: STM V_00:U8, _, V_01:U64 ASM_END
; 0x3512: mov edx, r8d (3 bytes)
0x3512.0: AND R_R8:U64, 0xffffffff:U64, V_00:U32
0x3512.1: OR V_00:U32, 0x0:U64, V_01:U32
0x3512.2: OR V_01:U32, 0x0:U32, R_RDX:U64 ASM_END
; 0x3515: sar dl, 7 (3 bytes)
0x3515.0: AND R_RDX:U64, 0xff:U64, V_00:U8
0x3515.2: SAR V_00:U8, 0x7:U8, V_02:U8
0x3515.22: AND R_RDX:U64, 0xffffffffffffff00:U64, V_16:U64
0x3515.23: OR V_02:U8, 0x0:U8, V_17:U64
0x3515.24: OR V_16:U64, V_17:U64, R_RDX:U64 ASM_END
; 0x3518: and dl, 0x1b (3 bytes)
0x3518.0: AND R_RDX:U64, 0xff:U64, V_00:U8
0x3518.1: AND V_00:U8, 0x1b:U8, V_01:U8
0x3518.18: AND R_RDX:U64, 0xffffffffffffff00:U64, V_12:U64
0x3518.19: OR V_01:U8, 0x0:U8, V_13:U64
0x3518.20: OR V_12:U64, V_13:U64, R_RDX:U64 ASM_END
; 0x351b: xor dl, r10b (3 bytes)
0x351b.0: AND R_RDX:U64, 0xff:U64, V_00:U8
0x351b.1: AND R_R10:U64, 0xff:U64, V_01:U8
0x351b.2: XOR V_00:U8, V_01:U8, V_02:U8
0x351b.19: AND R_RDX:U64, 0xffffffffffffff00:U64, V_13:U64
0x351b.20: OR V_02:U8, 0x0:U8, V_14:U64
0x351b.21: OR V_13:U64, V_14:U64, R_RDX:U64 ASM_END
; 0x351e: lea esi, [rdi + rdi] (3 bytes)
0x351e.0: ADD R_RDI:U64, R_RDI:U64, V_00:U64
0x351e.1: OR V_00:U64, 0x0:U64, V_01:U32
0x351e.2: OR V_01:U32, 0x0:U32, R_RSI:U64 ASM_END
; 0x3521: mov r10d, edi (3 bytes)
0x3521.0: AND R_RDI:U64, 0xffffffff:U64, V_00:U32
0x3521.1: OR V_00:U32, 0x0:U64, V_01:U32
0x3521.2: OR V_01:U32, 0x0:U32, R_R10:U64 ASM_END
; 0x3524: sar r10b, 7 (4 bytes)
0x3524.0: AND R_R10:U64, 0xff:U64, V_00:U8
0x3524.2: SAR V_00:U8, 0x7:U8, V_02:U8
0x3524.22: AND R_R10:U64, 0xffffffffffffff00:U64, V_16:U64
0x3524.23: OR V_02:U8, 0x0:U8, V_17:U64
0x3524.24: OR V_16:U64, V_17:U64, R_R10:U64 ASM_END
; 0x3528: and r10b, 0x1b (4 bytes)
0x3528.0: AND R_R10:U64, 0xff:U64, V_00:U8
0x3528.1: AND V_00:U8, 0x1b:U8, V_01:U8
0x3528.18: AND R_R10:U64, 0xffffffffffffff00:U64, V_12:U64
0x3528.19: OR V_01:U8, 0x0:U8, V_13:U64
0x3528.20: OR V_12:U64, V_13:U64, R_R10:U64 ASM_END
; 0x352c: xor r10b, sil (3 bytes)
0x352c.0: AND R_R10:U64, 0xff:U64, V_00:U8
0x352c.1: AND R_RSI:U64, 0xff:U64, V_01:U8
0x352c.2: XOR V_00:U8, V_01:U8, V_02:U8
0x352c.19: AND R_R10:U64, 0xffffffffffffff00:U64, V_13:U64
0x352c.20: OR V_02:U8, 0x0:U8, V_14:U64
0x352c.21: OR V_13:U64, V_14:U64, R_R10:U64 ASM_END
; 0x352f: mov r12d, r9d (3 bytes)
0x352f.0: AND R_R9:U64, 0xffffffff:U64, V_00:U32
0x352f.1: OR V_00:U32, 0x0:U64, V_01:U32
0x352f.2: OR V_01:U32, 0x0:U32, R_R12:U64 ASM_END
; 0x3532: mov r8, qword ptr [rbp - 0x60] (4 bytes)
0x3532.0: ADD R_RBP:U64, 0xffffffffffffffa0:U64, V_00:U64
0x3532.1: LDM V_00:U64, _, V_01:U64
0x3532.2: STR V_01:U64, _, R_R8:U64 ASM_END
; 0x3536: lea edi, [r8 + r8] (4 bytes)
0x3536.0: ADD R_R8:U64, R_R8:U64, V_00:U64
0x3536.1: OR V_00:U64, 0x0:U64, V_01:U32
0x3536.2: OR V_01:U32, 0x0:U32, R_RDI:U64 ASM_END
; 0x353a: xor dil, r9b (3 bytes)
0x353a.0: AND R_RDI:U64, 0xff:U64, V_00:U8
0x353a.1: AND R_R9:U64, 0xff:U64, V_01:U8
0x353a.2: XOR V_00:U8, V_01:U8, V_02:U8
0x353a.19: AND R_RDI:U64, 0xffffffffffffff00:U64, V_13:U64
0x353a.20: OR V_02:U8, 0x0:U8, V_14:U64
0x353a.21: OR V_13:U64, V_14:U64, R_RDI:U64 ASM_END
; 0x354f: vpextrb esi, xmm0, 0xb (6 bytes)
0x354f.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x354f.1: SHR V_00:U128, 0x58:U8, V_01:U128
0x354f.2: OR V_01:U128, 0x0:U128, V_02:U8
0x354f.3: OR V_02:U8, 0x0:U8, V_03:U32
0x354f.4: OR V_03:U32, 0x0:U64, V_04:U32
0x354f.5: OR V_04:U32, 0x0:U32, R_RSI:U64 ASM_END
; 0x3555: movzx r9d, byte ptr [rsi + r14] (5 bytes)
0x3555.0: ADD R_RSI:U64, 0x3d90:U64, V_00:U64
0x3555.1: LDM V_00:U64, _, V_01:U8
0x3555.2: OR V_01:U8, 0x0:U8, V_02:U32
0x3555.3: OR V_02:U32, 0x0:U64, V_03:U32
0x3555.4: OR V_03:U32, 0x0:U32, R_R9:U64 ASM_END
; 0x355a: mov ecx, r9d (3 bytes)
0x355a.0: AND R_R9:U64, 0xffffffff:U64, V_00:U32
0x355a.1: OR V_00:U32, 0x0:U64, V_01:U32
0x355a.2: OR V_01:U32, 0x0:U32, R_RCX:U64 ASM_END
; 0x355d: sar cl, 7 (3 bytes)
0x355d.0: AND R_RCX:U64, 0xff:U64, V_00:U8
0x355d.2: SAR V_00:U8, 0x7:U8, V_02:U8
0x355d.22: AND R_RCX:U64, 0xffffffffffffff00:U64, V_16:U64
0x355d.23: OR V_02:U8, 0x0:U8, V_17:U64
0x355d.24: OR V_16:U64, V_17:U64, R_RCX:U64 ASM_END
; 0x3560: and cl, 0x1b (3 bytes)
0x3560.0: AND R_RCX:U64, 0xff:U64, V_00:U8
0x3560.1: AND V_00:U8, 0x1b:U8, V_01:U8
0x3560.18: AND R_RCX:U64, 0xffffffffffffff00:U64, V_12:U64
0x3560.19: OR V_01:U8, 0x0:U8, V_13:U64
0x3560.20: OR V_12:U64, V_13:U64, R_RCX:U64 ASM_END
; 0x3563: lea esi, [r9 + r9] (4 bytes)
0x3563.0: ADD R_R9:U64, R_R9:U64, V_00:U64
0x3563.1: OR V_00:U64, 0x0:U64, V_01:U32
0x3563.2: OR V_01:U32, 0x0:U32, R_RSI:U64 ASM_END
; 0x3567: xor cl, sil (3 bytes)
0x3567.0: AND R_RCX:U64, 0xff:U64, V_00:U8
0x3567.1: AND R_RSI:U64, 0xff:U64, V_01:U8
0x3567.2: XOR V_00:U8, V_01:U8, V_02:U8
0x3567.19: AND R_RCX:U64, 0xffffffffffffff00:U64, V_13:U64
0x3567.20: OR V_02:U8, 0x0:U8, V_14:U64
0x3567.21: OR V_13:U64, V_14:U64, R_RCX:U64 ASM_END
; 0x356a: mov ebx, r8d (3 bytes)
0x356a.0: AND R_R8:U64, 0xffffffff:U64, V_00:U32
0x356a.1: OR V_00:U32, 0x0:U64, V_01:U32
0x356a.2: OR V_01:U32, 0x0:U32, R_RBX:U64 ASM_END
; 0x356d: sar bl, 7 (3 bytes)
0x356d.0: AND R_RBX:U64, 0xff:U64, V_00:U8
0x356d.2: SAR V_00:U8, 0x7:U8, V_02:U8
0x356d.22: AND R_RBX:U64, 0xffffffffffffff00:U64, V_16:U64
0x356d.23: OR V_02:U8, 0x0:U8, V_17:U64
0x356d.24: OR V_16:U64, V_17:U64, R_RBX:U64 ASM_END
; 0x3570: and bl, 0x1b (3 bytes)
0x3570.0: AND R_RBX:U64, 0xff:U64, V_00:U8
0x3570.1: AND V_00:U8, 0x1b:U8, V_01:U8
0x3570.18: AND R_RBX:U64, 0xffffffffffffff00:U64, V_12:U64
0x3570.19: OR V_01:U8, 0x0:U8, V_13:U64
0x3570.20: OR V_12:U64, V_13:U64, R_RBX:U64 ASM_END
; 0x3573: xor dil, bl (3 bytes)
0x3573.0: AND R_RDI:U64, 0xff:U64, V_00:U8
0x3573.1: AND R_RBX:U64, 0xff:U64, V_01:U8
0x3573.2: XOR V_00:U8, V_01:U8, V_02:U8
0x3573.19: AND R_RDI:U64, 0xffffffffffffff00:U64, V_13:U64
0x3573.20: OR V_02:U8, 0x0:U8, V_14:U64
0x3573.21: OR V_13:U64, V_14:U64, R_RDI:U64 ASM_END
; 0x3576: mov dword ptr [rbp - 0x4c], edi (3 bytes)
0x3576.0: AND R_RDI:U64, 0xffffffff:U64, V_00:U32
0x3576.1: ADD R_RBP:U64, 0xffffffffffffffb4:U64, V_01:U64
0x3576.2: STM V_00:U32, _, V_01:U64 ASM_END
; 0x3579: xor r12b, r8b (3 bytes)
0x3579.0: AND R_R12:U64, 0xff:U64, V_00:U8
0x3579.1: AND R_R8:U64, 0xff:U64, V_01:U8
0x3579.2: XOR V_00:U8, V_01:U8, V_02:U8
0x3579.19: AND R_R12:U64, 0xffffffffffffff00:U64, V_13:U64
0x3579.20: OR V_02:U8, 0x0:U8, V_14:U64
0x3579.21: OR V_13:U64, V_14:U64, R_R12:U64 ASM_END
; 0x357c: xor r12b, dl (3 bytes)
0x357c.0: AND R_R12:U64, 0xff:U64, V_00:U8
0x357c.1: AND R_RDX:U64, 0xff:U64, V_01:U8
0x357c.2: XOR V_00:U8, V_01:U8, V_02:U8
0x357c.19: AND R_R12:U64, 0xffffffffffffff00:U64, V_13:U64
0x357c.20: OR V_02:U8, 0x0:U8, V_14:U64
0x357c.21: OR V_13:U64, V_14:U64, R_R12:U64 ASM_END
; 0x357f: mov byte ptr [rbp - 0x29], r12b (4 bytes)
0x357f.0: AND R_R12:U64, 0xff:U64, V_00:U8
0x357f.1: ADD R_RBP:U64, 0xffffffffffffffd7:U64, V_01:U64
0x357f.2: STM V_00:U8, _, V_01:U64 ASM_END
; 0x359b: mov eax, edi (2 bytes)
0x359b.0: AND R_RDI:U64, 0xffffffff:U64, V_00:U32
0x359b.1: OR V_00:U32, 0x0:U64, V_01:U32
0x359b.2: OR V_01:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x359d: xor al, r8b (3 bytes)
0x359d.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x359d.1: AND R_R8:U64, 0xff:U64, V_01:U8
0x359d.2: XOR V_00:U8, V_01:U8, V_02:U8
0x359d.19: AND R_RAX:U64, 0xffffffffffffff00:U64, V_13:U64
0x359d.20: OR V_02:U8, 0x0:U8, V_14:U64
0x359d.21: OR V_13:U64, V_14:U64, R_RAX:U64 ASM_END
; 0x35a0: mov byte ptr [rbp - 0x2a], al (3 bytes)
0x35a0.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x35a0.1: ADD R_RBP:U64, 0xffffffffffffffd6:U64, V_01:U64
0x35a0.2: STM V_00:U8, _, V_01:U64 ASM_END
; 0x35a3: xor dl, r8b (3 bytes)
0x35a3.0: AND R_RDX:U64, 0xff:U64, V_00:U8
0x35a3.1: AND R_R8:U64, 0xff:U64, V_01:U8
0x35a3.2: XOR V_00:U8, V_01:U8, V_02:U8
0x35a3.19: AND R_RDX:U64, 0xffffffffffffff00:U64, V_13:U64
0x35a3.20: OR V_02:U8, 0x0:U8, V_14:U64
0x35a3.21: OR V_13:U64, V_14:U64, R_RDX:U64 ASM_END
; 0x35a6: vpextrb esi, xmm0, 1 (6 bytes)
0x35a6.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x35a6.1: SHR V_00:U128, 0x8:U8, V_01:U128
0x35a6.2: OR V_01:U128, 0x0:U128, V_02:U8
0x35a6.3: OR V_02:U8, 0x0:U8, V_03:U32
0x35a6.4: OR V_03:U32, 0x0:U64, V_04:U32
0x35a6.5: OR V_04:U32, 0x0:U32, R_RSI:U64 ASM_END
; 0x35af: movzx r8d, byte ptr [rsi + r14] (5 bytes)
0x35af.0: ADD R_RSI:U64, 0x3d90:U64, V_00:U64
0x35af.1: LDM V_00:U64, _, V_01:U8
0x35af.2: OR V_01:U8, 0x0:U8, V_02:U32
0x35af.3: OR V_02:U32, 0x0:U64, V_03:U32
0x35af.4: OR V_03:U32, 0x0:U32, R_R8:U64 ASM_END
; 0x35b4: vpextrb r14d, xmm0, 6 (6 bytes)
0x35b4.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x35b4.1: SHR V_00:U128, 0x30:U8, V_01:U128
0x35b4.2: OR V_01:U128, 0x0:U128, V_02:U8
0x35b4.3: OR V_02:U8, 0x0:U8, V_03:U32
0x35b4.4: OR V_03:U32, 0x0:U64, V_04:U32
0x35b4.5: OR V_04:U32, 0x0:U32, R_R14:U64 ASM_END
; 0x35ba: movzx r14d, byte ptr [r14 + rax] (5 bytes)
0x35ba.0: ADD R_R14:U64, 0x3d90:U64, V_00:U64
0x35ba.1: LDM V_00:U64, _, V_01:U8
0x35ba.2: OR V_01:U8, 0x0:U8, V_02:U32
0x35ba.3: OR V_02:U32, 0x0:U64, V_03:U32
0x35ba.4: OR V_03:U32, 0x0:U32, R_R14:U64 ASM_END
; 0x35bf: lea esi, [r8 + r8] (4 bytes)
0x35bf.0: ADD R_R8:U64, R_R8:U64, V_00:U64
0x35bf.1: OR V_00:U64, 0x0:U64, V_01:U32
0x35bf.2: OR V_01:U32, 0x0:U32, R_RSI:U64 ASM_END
; 0x35c3: xor sil, r14b (3 bytes)
0x35c3.0: AND R_RSI:U64, 0xff:U64, V_00:U8
0x35c3.1: AND R_R14:U64, 0xff:U64, V_01:U8
0x35c3.2: XOR V_00:U8, V_01:U8, V_02:U8
0x35c3.19: AND R_RSI:U64, 0xffffffffffffff00:U64, V_13:U64
0x35c3.20: OR V_02:U8, 0x0:U8, V_14:U64
0x35c3.21: OR V_13:U64, V_14:U64, R_RSI:U64 ASM_END
; 0x35cc: mov eax, r14d (3 bytes)
0x35cc.0: AND R_R14:U64, 0xffffffff:U64, V_00:U32
0x35cc.1: OR V_00:U32, 0x0:U64, V_01:U32
0x35cc.2: OR V_01:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x35cf: add r14d, r14d (3 bytes)
0x35cf.0: AND R_R14:U64, 0xffffffff:U64, V_00:U32
0x35cf.1: STR V_00:U32, _, V_01:U32
0x35cf.2: ADD V_00:U32, V_01:U32, V_02:U32
0x35cf.28: OR V_02:U32, 0x0:U64, V_22:U32
0x35cf.29: OR V_22:U32, 0x0:U32, R_R14:U64 ASM_END
; 0x35d2: sar al, 7 (3 bytes)
0x35d2.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x35d2.2: SAR V_00:U8, 0x7:U8, V_02:U8
0x35d2.22: AND R_RAX:U64, 0xffffffffffffff00:U64, V_16:U64
0x35d2.23: OR V_02:U8, 0x0:U8, V_17:U64
0x35d2.24: OR V_16:U64, V_17:U64, R_RAX:U64 ASM_END
; 0x35d5: and al, 0x1b (2 bytes)
0x35d5.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x35d5.1: AND V_00:U8, 0x1b:U8, V_01:U8
0x35d5.18: AND R_RAX:U64, 0xffffffffffffff00:U64, V_12:U64
0x35d5.19: OR V_01:U8, 0x0:U8, V_13:U64
0x35d5.20: OR V_12:U64, V_13:U64, R_RAX:U64 ASM_END
; 0x35d7: xor al, r14b (3 bytes)
0x35d7.0: AND R_RAX:U64, 0xff:U64, V_00:U8
0x35d7.1: AND R_R14:U64, 0xff:U64, V_01:U8
0x35d7.2: XOR V_00:U8, V_01:U8, V_02:U8
0x35d7.19: AND R_RAX:U64, 0xffffffffffffff00:U64, V_13:U64
0x35d7.20: OR V_02:U8, 0x0:U8, V_14:U64
0x35d7.21: OR V_13:U64, V_14:U64, R_RAX:U64 ASM_END
; 0x35da: mov ebx, r8d (3 bytes)
0x35da.0: AND R_R8:U64, 0xffffffff:U64, V_00:U32
0x35da.1: OR V_00:U32, 0x0:U64, V_01:U32
0x35da.2: OR V_01:U32, 0x0:U32, R_RBX:U64 ASM_END
; 0x35dd: sar bl, 7 (3 bytes)
0x35dd.0: AND R_RBX:U64, 0xff:U64, V_00:U8
0x35dd.2: SAR V_00:U8, 0x7:U8, V_02:U8
0x35dd.22: AND R_RBX:U64, 0xffffffffffffff00:U64, V_16:U64
0x35dd.23: OR V_02:U8, 0x0:U8, V_17:U64
0x35dd.24: OR V_16:U64, V_17:U64, R_RBX:U64 ASM_END
; 0x35e0: and bl, 0x1b (3 bytes)
0x35e0.0: AND R_RBX:U64, 0xff:U64, V_00:U8
0x35e0.1: AND V_00:U8, 0x1b:U8, V_01:U8
0x35e0.18: AND R_RBX:U64, 0xffffffffffffff00:U64, V_12:U64
0x35e0.19: OR V_01:U8, 0x0:U8, V_13:U64
0x35e0.20: OR V_12:U64, V_13:U64, R_RBX:U64 ASM_END
; 0x35e3: xor sil, bl (3 bytes)
0x35e3.0: AND R_RSI:U64, 0xff:U64, V_00:U8
0x35e3.1: AND R_RBX:U64, 0xff:U64, V_01:U8
0x35e3.2: XOR V_00:U8, V_01:U8, V_02:U8
0x35e3.19: AND R_RSI:U64, 0xffffffffffffff00:U64, V_13:U64
0x35e3.20: OR V_02:U8, 0x0:U8, V_14:U64
0x35e3.21: OR V_13:U64, V_14:U64, R_RSI:U64 ASM_END
; 0x35e9: xor sil, al (3 bytes)
0x35e9.0: AND R_RSI:U64, 0xff:U64, V_00:U8
0x35e9.1: AND R_RAX:U64, 0xff:U64, V_01:U8
0x35e9.2: XOR V_00:U8, V_01:U8, V_02:U8
0x35e9.19: AND R_RSI:U64, 0xffffffffffffff00:U64, V_13:U64
0x35e9.20: OR V_02:U8, 0x0:U8, V_14:U64
0x35e9.21: OR V_13:U64, V_14:U64, R_RSI:U64 ASM_END
; 0x35ef: movzx ebx, byte ptr [rbp - 0x40] (4 bytes)
0x35ef.0: ADD R_RBP:U64, 0xffffffffffffffc0:U64, V_00:U64
0x35ef.1: LDM V_00:U64, _, V_01:U8
0x35ef.2: OR V_01:U8, 0x0:U8, V_02:U32
0x35ef.3: OR V_02:U32, 0x0:U64, V_03:U32
0x35ef.4: OR V_03:U32, 0x0:U32, R_RBX:U64 ASM_END
; 0x35f3: vmovd xmm5, ebx (4 bytes)
0x35f3.0: AND R_RBX:U64, 0xffffffff:U64, V_00:U32
0x35f3.1: OR V_00:U32, 0x0:U32, V_01:U32
0x35f3.2: OR V_01:U32, 0x0:U32, V_02:U128
0x35f3.3: OR V_02:U128, 0x0:U512, V_03:U128
0x35f3.4: OR V_03:U128, 0x0:U128, R_ZMM5:U512 ASM_END
; 0x35f7: mov rbx, qword ptr [rbp - 0x68] (4 bytes)
0x35f7.0: ADD R_RBP:U64, 0xffffffffffffff98:U64, V_00:U64
0x35f7.1: LDM V_00:U64, _, V_01:U64
0x35f7.2: STR V_01:U64, _, R_RBX:U64 ASM_END
; 0x35fb: vpextrb edi, xmm0, 0xc (6 bytes)
0x35fb.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x35fb.1: SHR V_00:U128, 0x60:U8, V_01:U128
0x35fb.2: OR V_01:U128, 0x0:U128, V_02:U8
0x35fb.3: OR V_02:U8, 0x0:U8, V_03:U32
0x35fb.4: OR V_03:U32, 0x0:U64, V_04:U32
0x35fb.5: OR V_04:U32, 0x0:U32, R_RDI:U64 ASM_END
; 0x3601: mov qword ptr [rbp - 0x40], rdi (4 bytes)
0x3601.0: ADD R_RBP:U64, 0xffffffffffffffc0:U64, V_00:U64
0x3601.1: STM R_RDI:U64, _, V_00:U64 ASM_END
; 0x3605: xor bl, byte ptr [rbp - 0x2b] (3 bytes)
0x3605.0: AND R_RBX:U64, 0xff:U64, V_00:U8
0x3605.1: ADD R_RBP:U64, 0xffffffffffffffd5:U64, V_01:U64
0x3605.2: LDM V_01:U64, _, V_02:U8
0x3605.3: XOR V_00:U8, V_02:U8, V_03:U8
0x3605.20: AND R_RBX:U64, 0xffffffffffffff00:U64, V_14:U64
0x3605.21: OR V_03:U8, 0x0:U8, V_15:U64
0x3605.22: OR V_14:U64, V_15:U64, R_RBX:U64 ASM_END
; 0x360f: mov r12, qword ptr [rbp - 0x40] (4 bytes)
0x360f.0: ADD R_RBP:U64, 0xffffffffffffffc0:U64, V_00:U64
0x360f.1: LDM V_00:U64, _, V_01:U64
0x360f.2: STR V_01:U64, _, R_R12:U64 ASM_END
; 0x3613: movzx r9d, byte ptr [r12 + r9] (5 bytes)
0x3613.0: ADD R_R12:U64, 0x3d90:U64, V_00:U64
0x3613.1: LDM V_00:U64, _, V_01:U8
0x3613.2: OR V_01:U8, 0x0:U8, V_02:U32
0x3613.3: OR V_02:U32, 0x0:U64, V_03:U32
0x3613.4: OR V_03:U32, 0x0:U32, R_R9:U64 ASM_END
; 0x3618: xor byte ptr [rbp - 0x34], r15b (4 bytes)
0x3618.0: ADD R_RBP:U64, 0xffffffffffffffcc:U64, V_00:U64
0x3618.1: LDM V_00:U64, _, V_01:U8
0x3618.2: AND R_R15:U64, 0xff:U64, V_02:U8
0x3618.3: XOR V_01:U8, V_02:U8, V_03:U8
0x3618.20: STR V_00:U64, _, V_14:U64
0x3618.21: STM V_03:U8, _, V_14:U64 ASM_END
; 0x361f: xor r8b, r9b (3 bytes)
0x361f.0: AND R_R8:U64, 0xff:U64, V_00:U8
0x361f.1: AND R_R9:U64, 0xff:U64, V_01:U8
0x361f.2: XOR V_00:U8, V_01:U8, V_02:U8
0x361f.19: AND R_R8:U64, 0xffffffffffffff00:U64, V_13:U64
0x361f.20: OR V_02:U8, 0x0:U8, V_14:U64
0x361f.21: OR V_13:U64, V_14:U64, R_R8:U64 ASM_END
; 0x3622: mov r15d, r8d (3 bytes)
0x3622.0: AND R_R8:U64, 0xffffffff:U64, V_00:U32
0x3622.1: OR V_00:U32, 0x0:U64, V_01:U32
0x3622.2: OR V_01:U32, 0x0:U32, R_R15:U64 ASM_END
; 0x3625: xor r15b, cl (3 bytes)
0x3625.0: AND R_R15:U64, 0xff:U64, V_00:U8
0x3625.1: AND R_RCX:U64, 0xff:U64, V_01:U8
0x3625.2: XOR V_00:U8, V_01:U8, V_02:U8
0x3625.19: AND R_R15:U64, 0xffffffffffffff00:U64, V_13:U64
0x3625.20: OR V_02:U8, 0x0:U8, V_14:U64
0x3625.21: OR V_13:U64, V_14:U64, R_R15:U64 ASM_END
; 0x3628: movzx ecx, bl (3 bytes)
0x3628.0: AND R_RBX:U64, 0xff:U64, V_00:U8
0x3628.1: OR V_00:U8, 0x0:U8, V_01:U32
0x3628.2: OR V_01:U32, 0x0:U64, V_02:U32
0x3628.3: OR V_02:U32, 0x0:U32, R_RCX:U64 ASM_END
; 0x362b: mov dword ptr [rbp - 0x40], ecx (3 bytes)
0x362b.0: AND R_RCX:U64, 0xffffffff:U64, V_00:U32
0x362b.1: ADD R_RBP:U64, 0xffffffffffffffc0:U64, V_01:U64
0x362b.2: STM V_00:U32, _, V_01:U64 ASM_END
; 0x363c: movzx ecx, byte ptr [rbp - 0x34] (4 bytes)
0x363c.0: ADD R_RBP:U64, 0xffffffffffffffcc:U64, V_00:U64
0x363c.1: LDM V_00:U64, _, V_01:U8
0x363c.2: OR V_01:U8, 0x0:U8, V_02:U32
0x363c.3: OR V_02:U32, 0x0:U64, V_03:U32
0x363c.4: OR V_03:U32, 0x0:U32, R_RCX:U64 ASM_END
; 0x3640: mov dword ptr [rbp - 0x34], ecx (3 bytes)
0x3640.0: AND R_RCX:U64, 0xffffffff:U64, V_00:U32
0x3640.1: ADD R_RBP:U64, 0xffffffffffffffcc:U64, V_01:U64
0x3640.2: STM V_00:U32, _, V_01:U64 ASM_END
; 0x364e: movzx edi, byte ptr [rbp - 0x29] (4 bytes)
0x364e.0: ADD R_RBP:U64, 0xffffffffffffffd7:U64, V_00:U64
0x364e.1: LDM V_00:U64, _, V_01:U8
0x364e.2: OR V_01:U8, 0x0:U8, V_02:U32
0x364e.3: OR V_02:U32, 0x0:U64, V_03:U32
0x364e.4: OR V_03:U32, 0x0:U32, R_RDI:U64 ASM_END
; 0x365c: movzx edx, dl (3 bytes)
0x365c.0: AND R_RDX:U64, 0xff:U64, V_00:U8
0x365c.1: OR V_00:U8, 0x0:U8, V_01:U32
0x365c.2: OR V_01:U32, 0x0:U64, V_02:U32
0x365c.3: OR V_02:U32, 0x0:U32, R_RDX:U64 ASM_END
; 0x3678: movzx r11d, r15b (4 bytes)
0x3678.0: AND R_R15:U64, 0xff:U64, V_00:U8
0x3678.1: OR V_00:U8, 0x0:U8, V_01:U32
0x3678.2: OR V_01:U32, 0x0:U64, V_02:U32
0x3678.3: OR V_02:U32, 0x0:U32, R_R11:U64 ASM_END
; 0x368d: movzx r10d, r10b (4 bytes)
0x368d.0: AND R_R10:U64, 0xff:U64, V_00:U8
0x368d.1: OR V_00:U8, 0x0:U8, V_01:U32
0x368d.2: OR V_01:U32, 0x0:U64, V_02:U32
0x368d.3: OR V_02:U32, 0x0:U32, R_R10:U64 ASM_END
; 0x3707: mov rax, qword ptr [rbp - 0x70] (4 bytes)
0x3707.0: ADD R_RBP:U64, 0xffffffffffffff90:U64, V_00:U64
0x3707.1: LDM V_00:U64, _, V_01:U64
0x3707.2: STR V_01:U64, _, R_RAX:U64 ASM_END
; 0x370b: vpshufb xmm4, xmm4, xmm1 (5 bytes)
0x370b.0: OR R_ZMM4:U512, 0x0:U512, V_00:U128
0x370b.1: OR R_ZMM1:U512, 0x0:U512, V_01:U128
0x370b.2: OR V_01:U128, 0x0:U128, V_02:U8
0x370b.3: SHR V_02:U8, 0x7:U8, V_03:U8
0x370b.4: AND V_03:U8, 0x1:U8, V_04:U8
0x370b.5: AND V_02:U8, 0xf:U8, V_05:U8
0x370b.6: OR V_05:U8, 0x0:U8, V_06:U16
0x370b.7: SHL V_06:U16, 0x3:U8, V_07:U16
0x370b.8: SHR V_00:U128, V_07:U16, V_08:U128
0x370b.9: OR V_08:U128, 0x0:U128, V_09:U8
0x370b.10: SUB 0x0:U8, V_04:U8, V_10:U8
0x370b.11: NOT V_10:U8, _, V_11:U8
0x370b.12: AND V_09:U8, V_11:U8, V_12:U8
0x370b.13: OR V_12:U8, 0x0:U8, V_13:U128
0x370b.14: SHR V_01:U128, 0x8:U16, V_14:U128
0x370b.15: OR V_14:U128, 0x0:U128, V_15:U8
0x370b.16: SHR V_15:U8, 0x7:U8, V_16:U8
0x370b.17: AND V_16:U8, 0x1:U8, V_17:U8
0x370b.18: AND V_15:U8, 0xf:U8, V_18:U8
0x370b.19: OR V_18:U8, 0x0:U8, V_19:U16
0x370b.20: SHL V_19:U16, 0x3:U8, V_20:U16
0x370b.21: SHR V_00:U128, V_20:U16, V_21:U128
0x370b.22: OR V_21:U128, 0x0:U128, V_22:U8
0x370b.23: SUB 0x0:U8, V_17:U8, V_23:U8
0x370b.24: NOT V_23:U8, _, V_24:U8
0x370b.25: AND V_22:U8, V_24:U8, V_25:U8
0x370b.26: OR V_25:U8, 0x0:U8, V_26:U128
0x370b.27: SHL V_26:U128, 0x8:U16, V_27:U128
0x370b.28: OR V_13:U128, V_27:U128, V_28:U128
0x370b.29: SHR V_01:U128, 0x10:U16, V_29:U128
0x370b.30: OR V_29:U128, 0x0:U128, V_30:U8
0x370b.31: SHR V_30:U8, 0x7:U8, V_31:U8
0x370b.32: AND V_31:U8, 0x1:U8, V_32:U8
0x370b.33: AND V_30:U8, 0xf:U8, V_33:U8
0x370b.34: OR V_33:U8, 0x0:U8, V_34:U16
0x370b.35: SHL V_34:U16, 0x3:U8, V_35:U16
0x370b.36: SHR V_00:U128, V_35:U16, V_36:U128
0x370b.37: OR V_36:U128, 0x0:U128, V_37:U8
0x370b.38: SUB 0x0:U8, V_32:U8, V_38:U8
0x370b.39: NOT V_38:U8, _, V_39:U8
0x370b.40: AND V_37:U8, V_39:U8, V_40:U8
0x370b.41: OR V_40:U8, 0x0:U8, V_41:U128
0x370b.42: SHL V_41:U128, 0x10:U16, V_42:U128
0x370b.43: OR V_28:U128, V_42:U128, V_43:U128
0x370b.44: SHR V_01:U128, 0x18:U16, V_44:U128
0x370b.45: OR V_44:U128, 0x0:U128, V_45:U8
0x370b.46: SHR V_45:U8, 0x7:U8, V_46:U8
0x370b.47: AND V_46:U8, 0x1:U8, V_47:U8
0x370b.48: AND V_45:U8, 0xf:U8, V_48:U8
0x370b.49: OR V_48:U8, 0x0:U8, V_49:U16
0x370b.50: SHL V_49:U16, 0x3:U8, V_50:U16
0x370b.51: SHR V_00:U128, V_50:U16, V_51:U128
0x370b.52: OR V_51:U128, 0x0:U128, V_52:U8
0x370b.53: SUB 0x0:U8, V_47:U8, V_53:U8
0x370b.54: NOT V_53:U8, _, V_54:U8
0x370b.55: AND V_52:U8, V_54:U8, V_55:U8
0x370b.56: OR V_55:U8, 0x0:U8, V_56:U128
0x370b.57: SHL V_56:U128, 0x18:U16, V_57:U128
0x370b.58: OR V_43:U128, V_57:U128, V_58:U128
0x370b.59: SHR V_01:U128, 0x20:U16, V_59:U128
0x370b.60: OR V_59:U128, 0x0:U128, V_60:U8
0x370b.61: SHR V_60:U8, 0x7:U8, V_61:U8
0x370b.62: AND V_61:U8, 0x1:U8, V_62:U8
0x370b.63: AND V_60:U8, 0xf:U8, V_63:U8
0x370b.64: OR V_63:U8, 0x0:U8, V_64:U16
0x370b.65: SHL V_64:U16, 0x3:U8, V_65:U16
0x370b.66: SHR V_00:U128, V_65:U16, V_66:U128
0x370b.67: OR V_66:U128, 0x0:U128, V_67:U8
0x370b.68: SUB 0x0:U8, V_62:U8, V_68:U8
0x370b.69: NOT V_68:U8, _, V_69:U8
0x370b.70: AND V_67:U8, V_69:U8, V_70:U8
0x370b.71: OR V_70:U8, 0x0:U8, V_71:U128
0x370b.72: SHL V_71:U128, 0x20:U16, V_72:U128
0x370b.73: OR V_58:U128, V_72:U128, V_73:U128
0x370b.74: SHR V_01:U128, 0x28:U16, V_74:U128
0x370b.75: OR V_74:U128, 0x0:U128, V_75:U8
0x370b.76: SHR V_75:U8, 0x7:U8, V_76:U8
0x370b.77: AND V_76:U8, 0x1:U8, V_77:U8
0x370b.78: AND V_75:U8, 0xf:U8, V_78:U8
0x370b.79: OR V_78:U8, 0x0:U8, V_79:U16
0x370b.80: SHL V_79:U16, 0x3:U8, V_80:U16
0x370b.81: SHR V_00:U128, V_80:U16, V_81:U128
0x370b.82: OR V_81:U128, 0x0:U128, V_82:U8
0x370b.83: SUB 0x0:U8, V_77:U8, V_83:U8
0x370b.84: NOT V_83:U8, _, V_84:U8
0x370b.85: AND V_82:U8, V_84:U8, V_85:U8
0x370b.86: OR V_85:U8, 0x0:U8, V_86:U128
0x370b.87: SHL V_86:U128, 0x28:U16, V_87:U128
0x370b.88: OR V_73:U128, V_87:U128, V_88:U128
0x370b.89: SHR V_01:U128, 0x30:U16, V_89:U128
0x370b.90: OR V_89:U128, 0x0:U128, V_90:U8
0x370b.91: SHR V_90:U8, 0x7:U8, V_91:U8
0x370b.92: AND V_91:U8, 0x1:U8, V_92:U8
0x370b.93: AND V_90:U8, 0xf:U8, V_93:U8
0x370b.94: OR V_93:U8, 0x0:U8, V_94:U16
0x370b.95: SHL V_94:U16, 0x3:U8, V_95:U16
0x370b.96: SHR V_00:U128, V_95:U16, V_96:U128
0x370b.97: OR V_96:U128, 0x0:U128, V_97:U8
0x370b.98: SUB 0x0:U8, V_92:U8, V_98:U8
0x370b.99: NOT V_98:U8, _, V_99:U8
0x370b.100: AND V_97:U8, V_99:U8, V_100:U8
0x370b.101: OR V_100:U8, 0x0:U8, V_101:U128
0x370b.102: SHL V_101:U128, 0x30:U16, V_102:U128
0x370b.103: OR V_88:U128, V_102:U128, V_103:U128
0x370b.104: SHR V_01:U128, 0x38:U16, V_104:U128
0x370b.105: OR V_104:U128, 0x0:U128, V_105:U8
0x370b.106: SHR V_105:U8, 0x7:U8, V_106:U8
0x370b.107: AND V_106:U8, 0x1:U8, V_107:U8
0x370b.108: AND V_105:U8, 0xf:U8, V_108:U8
0x370b.109: OR V_108:U8, 0x0:U8, V_109:U16
0x370b.110: SHL V_109:U16, 0x3:U8, V_110:U16
0x370b.111: SHR V_00:U128, V_110:U16, V_111:U128
0x370b.112: OR V_111:U128, 0x0:U128, V_112:U8
0x370b.113: SUB 0x0:U8, V_107:U8, V_113:U8
0x370b.114: NOT V_113:U8, _, V_114:U8
0x370b.115: AND V_112:U8, V_114:U8, V_115:U8
0x370b.116: OR V_115:U8, 0x0:U8, V_116:U128
0x370b.117: SHL V_116:U128, 0x38:U16, V_117:U128
0x370b.118: OR V_103:U128, V_117:U128, V_118:U128
0x370b.119: SHR V_01:U128, 0x40:U16, V_119:U128
0x370b.120: OR V_119:U128, 0x0:U128, V_120:U8
0x370b.121: SHR V_120:U8, 0x7:U8, V_121:U8
0x370b.122: AND V_121:U8, 0x1:U8, V_122:U8
0x370b.123: AND V_120:U8, 0xf:U8, V_123:U8
0x370b.124: OR V_123:U8, 0x0:U8, V_124:U16
0x370b.125: SHL V_124:U16, 0x3:U8, V_125:U16
0x370b.126: SHR V_00:U128, V_125:U16, V_126:U128
0x370b.127: OR V_126:U128, 0x0:U128, V_127:U8
0x370b.128: SUB 0x0:U8, V_122:U8, V_128:U8
0x370b.129: NOT V_128:U8, _, V_129:U8
0x370b.130: AND V_127:U8, V_129:U8, V_130:U8
0x370b.131: OR V_130:U8, 0x0:U8, V_131:U128
0x370b.132: SHL V_131:U128, 0x40:U16, V_132:U128
0x370b.133: OR V_118:U128, V_132:U128, V_133:U128
0x370b.134: SHR V_01:U128, 0x48:U16, V_134:U128
0x370b.135: OR V_134:U128, 0x0:U128, V_135:U8
0x370b.136: SHR V_135:U8, 0x7:U8, V_136:U8
0x370b.137: AND V_136:U8, 0x1:U8, V_137:U8
0x370b.138: AND V_135:U8, 0xf:U8, V_138:U8
0x370b.139: OR V_138:U8, 0x0:U8, V_139:U16
0x370b.140: SHL V_139:U16, 0x3:U8, V_140:U16
0x370b.141: SHR V_00:U128, V_140:U16, V_141:U128
0x370b.142: OR V_141:U128, 0x0:U128, V_142:U8
0x370b.143: SUB 0x0:U8, V_137:U8, V_143:U8
0x370b.144: NOT V_143:U8, _, V_144:U8
0x370b.145: AND V_142:U8, V_144:U8, V_145:U8
0x370b.146: OR V_145:U8, 0x0:U8, V_146:U128
0x370b.147: SHL V_146:U128, 0x48:U16, V_147:U128
0x370b.148: OR V_133:U128, V_147:U128, V_148:U128
0x370b.149: SHR V_01:U128, 0x50:U16, V_149:U128
0x370b.150: OR V_149:U128, 0x0:U128, V_150:U8
0x370b.151: SHR V_150:U8, 0x7:U8, V_151:U8
0x370b.152: AND V_151:U8, 0x1:U8, V_152:U8
0x370b.153: AND V_150:U8, 0xf:U8, V_153:U8
0x370b.154: OR V_153:U8, 0x0:U8, V_154:U16
0x370b.155: SHL V_154:U16, 0x3:U8, V_155:U16
0x370b.156: SHR V_00:U128, V_155:U16, V_156:U128
0x370b.157: OR V_156:U128, 0x0:U128, V_157:U8
0x370b.158: SUB 0x0:U8, V_152:U8, V_158:U8
0x370b.159: NOT V_158:U8, _, V_159:U8
0x370b.160: AND V_157:U8, V_159:U8, V_160:U8
0x370b.161: OR V_160:U8, 0x0:U8, V_161:U128
0x370b.162: SHL V_161:U128, 0x50:U16, V_162:U128
0x370b.163: OR V_148:U128, V_162:U128, V_163:U128
0x370b.164: SHR V_01:U128, 0x58:U16, V_164:U128
0x370b.165: OR V_164:U128, 0x0:U128, V_165:U8
0x370b.166: SHR V_165:U8, 0x7:U8, V_166:U8
0x370b.167: AND V_166:U8, 0x1:U8, V_167:U8
0x370b.168: AND V_165:U8, 0xf:U8, V_168:U8
0x370b.169: OR V_168:U8, 0x0:U8, V_169:U16
0x370b.170: SHL V_169:U16, 0x3:U8, V_170:U16
0x370b.171: SHR V_00:U128, V_170:U16, V_171:U128
0x370b.172: OR V_171:U128, 0x0:U128, V_172:U8
0x370b.173: SUB 0x0:U8, V_167:U8, V_173:U8
0x370b.174: NOT V_173:U8, _, V_174:U8
0x370b.175: AND V_172:U8, V_174:U8, V_175:U8
0x370b.176: OR V_175:U8, 0x0:U8, V_176:U128
0x370b.177: SHL V_176:U128, 0x58:U16, V_177:U128
0x370b.178: OR V_163:U128, V_177:U128, V_178:U128
0x370b.179: SHR V_01:U128, 0x60:U16, V_179:U128
0x370b.180: OR V_179:U128, 0x0:U128, V_180:U8
0x370b.181: SHR V_180:U8, 0x7:U8, V_181:U8
0x370b.182: AND V_181:U8, 0x1:U8, V_182:U8
0x370b.183: AND V_180:U8, 0xf:U8, V_183:U8
0x370b.184: OR V_183:U8, 0x0:U8, V_184:U16
0x370b.185: SHL V_184:U16, 0x3:U8, V_185:U16
0x370b.186: SHR V_00:U128, V_185:U16, V_186:U128
0x370b.187: OR V_186:U128, 0x0:U128, V_187:U8
0x370b.188: SUB 0x0:U8, V_182:U8, V_188:U8
0x370b.189: NOT V_188:U8, _, V_189:U8
0x370b.190: AND V_187:U8, V_189:U8, V_190:U8
0x370b.191: OR V_190:U8, 0x0:U8, V_191:U128
0x370b.192: SHL V_191:U128, 0x60:U16, V_192:U128
0x370b.193: OR V_178:U128, V_192:U128, V_193:U128
0x370b.194: SHR V_01:U128, 0x68:U16, V_194:U128
0x370b.195: OR V_194:U128, 0x0:U128, V_195:U8
0x370b.196: SHR V_195:U8, 0x7:U8, V_196:U8
0x370b.197: AND V_196:U8, 0x1:U8, V_197:U8
0x370b.198: AND V_195:U8, 0xf:U8, V_198:U8
0x370b.199: OR V_198:U8, 0x0:U8, V_199:U16
0x370b.200: SHL V_199:U16, 0x3:U8, V_200:U16
0x370b.201: SHR V_00:U128, V_200:U16, V_201:U128
0x370b.202: OR V_201:U128, 0x0:U128, V_202:U8
0x370b.203: SUB 0x0:U8, V_197:U8, V_203:U8
0x370b.204: NOT V_203:U8, _, V_204:U8
0x370b.205: AND V_202:U8, V_204:U8, V_205:U8
0x370b.206: OR V_205:U8, 0x0:U8, V_206:U128
0x370b.207: SHL V_206:U128, 0x68:U16, V_207:U128
0x370b.208: OR V_193:U128, V_207:U128, V_208:U128
0x370b.209: SHR V_01:U128, 0x70:U16, V_209:U128
0x370b.210: OR V_209:U128, 0x0:U128, V_210:U8
0x370b.211: SHR V_210:U8, 0x7:U8, V_211:U8
0x370b.212: AND V_211:U8, 0x1:U8, V_212:U8
0x370b.213: AND V_210:U8, 0xf:U8, V_213:U8
0x370b.214: OR V_213:U8, 0x0:U8, V_214:U16
0x370b.215: SHL V_214:U16, 0x3:U8, V_215:U16
0x370b.216: SHR V_00:U128, V_215:U16, V_216:U128
0x370b.217: OR V_216:U128, 0x0:U128, V_217:U8
0x370b.218: SUB 0x0:U8, V_212:U8, V_218:U8
0x370b.219: NOT V_218:U8, _, V_219:U8
0x370b.220: AND V_217:U8, V_219:U8, V_220:U8
0x370b.221: OR V_220:U8, 0x0:U8, V_221:U128
0x370b.222: SHL V_221:U128, 0x70:U16, V_222:U128
0x370b.223: OR V_208:U128, V_222:U128, V_223:U128
0x370b.224: SHR V_01:U128, 0x78:U16, V_224:U128
0x370b.225: OR V_224:U128, 0x0:U128, V_225:U8
0x370b.226: SHR V_225:U8, 0x7:U8, V_226:U8
0x370b.227: AND V_226:U8, 0x1:U8, V_227:U8
0x370b.228: AND V_225:U8, 0xf:U8, V_228:U8
0x370b.229: OR V_228:U8, 0x0:U8, V_229:U16
0x370b.230: SHL V_229:U16, 0x3:U8, V_230:U16
0x370b.231: SHR V_00:U128, V_230:U16, V_231:U128
0x370b.232: OR V_231:U128, 0x0:U128, V_232:U8
0x370b.233: SUB 0x0:U8, V_227:U8, V_233:U8
0x370b.234: NOT V_233:U8, _, V_234:U8
0x370b.235: AND V_232:U8, V_234:U8, V_235:U8
0x370b.236: OR V_235:U8, 0x0:U8, V_236:U128
0x370b.237: SHL V_236:U128, 0x78:U16, V_237:U128
0x370b.238: OR V_223:U128, V_237:U128, V_238:U128
0x370b.239: OR V_238:U128, 0x0:U512, V_239:U128
0x370b.240: OR V_239:U128, 0x0:U128, R_ZMM4:U512 ASM_END
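
The long expansion above is the byte-by-byte lowering of vpshufb: for each of the 16 result lanes, the corresponding selector byte from xmm1 is split into its MSB and its low nibble, the nibble picks a source byte out of xmm4 (the SHR by V_07, V_20, ... is the lane index times 8), and the SUB/NOT/AND triple builds a 0xFF-or-0x00 mask that zeroes the lane when the selector's MSB is set. A compact Python model of the same per-lane rule, written over 16-byte lists instead of 128-bit scalars (names are mine):

    def pshufb(src, sel):
        out = []
        for s in sel:
            msb  = (s >> 7) & 1          # SHR ..., 0x7 / AND ..., 0x1
            idx  = s & 0x0F              # AND ..., 0xf
            mask = (~(0 - msb)) & 0xFF   # SUB 0x0, msb / NOT -> 0xFF or 0x00
            out.append(src[idx] & mask)  # SHR src, idx*8, truncate to U8, AND mask
        return out

    assert pshufb(list(range(16)), list(range(16))) == list(range(16))
    assert pshufb(list(range(16)), [0x80] * 16) == [0] * 16
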
; 0x3710: vpxor xmm4, xmm0, xmm4 (4 bytes)
0x3710.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x3710.1: OR R_ZMM4:U512, 0x0:U512, V_01:U128
0x3710.2: XOR V_00:U128, V_01:U128, V_02:U128
0x3710.3: OR V_02:U128, 0x0:U512, V_03:U128
0x3710.4: OR V_03:U128, 0x0:U128, R_ZMM4:U512 ASM_END
; 0x3714: vpshufb xmm0, xmm3, xmm2 (5 bytes)
0x3714.0: OR R_ZMM3:U512, 0x0:U512, V_00:U128
0x3714.1: OR R_ZMM2:U512, 0x0:U512, V_01:U128
0x3714.2: OR V_01:U128, 0x0:U128, V_02:U8
0x3714.3: SHR V_02:U8, 0x7:U8, V_03:U8
0x3714.4: AND V_03:U8, 0x1:U8, V_04:U8
0x3714.5: AND V_02:U8, 0xf:U8, V_05:U8
0x3714.6: OR V_05:U8, 0x0:U8, V_06:U16
0x3714.7: SHL V_06:U16, 0x3:U8, V_07:U16
0x3714.8: SHR V_00:U128, V_07:U16, V_08:U128
0x3714.9: OR V_08:U128, 0x0:U128, V_09:U8
0x3714.10: SUB 0x0:U8, V_04:U8, V_10:U8
0x3714.11: NOT V_10:U8, _, V_11:U8
0x3714.12: AND V_09:U8, V_11:U8, V_12:U8
0x3714.13: OR V_12:U8, 0x0:U8, V_13:U128
0x3714.14: SHR V_01:U128, 0x8:U16, V_14:U128
0x3714.15: OR V_14:U128, 0x0:U128, V_15:U8
0x3714.16: SHR V_15:U8, 0x7:U8, V_16:U8
0x3714.17: AND V_16:U8, 0x1:U8, V_17:U8
0x3714.18: AND V_15:U8, 0xf:U8, V_18:U8
0x3714.19: OR V_18:U8, 0x0:U8, V_19:U16
0x3714.20: SHL V_19:U16, 0x3:U8, V_20:U16
0x3714.21: SHR V_00:U128, V_20:U16, V_21:U128
0x3714.22: OR V_21:U128, 0x0:U128, V_22:U8
0x3714.23: SUB 0x0:U8, V_17:U8, V_23:U8
0x3714.24: NOT V_23:U8, _, V_24:U8
0x3714.25: AND V_22:U8, V_24:U8, V_25:U8
0x3714.26: OR V_25:U8, 0x0:U8, V_26:U128
0x3714.27: SHL V_26:U128, 0x8:U16, V_27:U128
0x3714.28: OR V_13:U128, V_27:U128, V_28:U128
0x3714.29: SHR V_01:U128, 0x10:U16, V_29:U128
0x3714.30: OR V_29:U128, 0x0:U128, V_30:U8
0x3714.31: SHR V_30:U8, 0x7:U8, V_31:U8
0x3714.32: AND V_31:U8, 0x1:U8, V_32:U8
0x3714.33: AND V_30:U8, 0xf:U8, V_33:U8
0x3714.34: OR V_33:U8, 0x0:U8, V_34:U16
0x3714.35: SHL V_34:U16, 0x3:U8, V_35:U16
0x3714.36: SHR V_00:U128, V_35:U16, V_36:U128
0x3714.37: OR V_36:U128, 0x0:U128, V_37:U8
0x3714.38: SUB 0x0:U8, V_32:U8, V_38:U8
0x3714.39: NOT V_38:U8, _, V_39:U8
0x3714.40: AND V_37:U8, V_39:U8, V_40:U8
0x3714.41: OR V_40:U8, 0x0:U8, V_41:U128
0x3714.42: SHL V_41:U128, 0x10:U16, V_42:U128
0x3714.43: OR V_28:U128, V_42:U128, V_43:U128
0x3714.44: SHR V_01:U128, 0x18:U16, V_44:U128
0x3714.45: OR V_44:U128, 0x0:U128, V_45:U8
0x3714.46: SHR V_45:U8, 0x7:U8, V_46:U8
0x3714.47: AND V_46:U8, 0x1:U8, V_47:U8
0x3714.48: AND V_45:U8, 0xf:U8, V_48:U8
0x3714.49: OR V_48:U8, 0x0:U8, V_49:U16
0x3714.50: SHL V_49:U16, 0x3:U8, V_50:U16
0x3714.51: SHR V_00:U128, V_50:U16, V_51:U128
0x3714.52: OR V_51:U128, 0x0:U128, V_52:U8
0x3714.53: SUB 0x0:U8, V_47:U8, V_53:U8
0x3714.54: NOT V_53:U8, _, V_54:U8
0x3714.55: AND V_52:U8, V_54:U8, V_55:U8
0x3714.56: OR V_55:U8, 0x0:U8, V_56:U128
0x3714.57: SHL V_56:U128, 0x18:U16, V_57:U128
0x3714.58: OR V_43:U128, V_57:U128, V_58:U128
0x3714.59: SHR V_01:U128, 0x20:U16, V_59:U128
0x3714.60: OR V_59:U128, 0x0:U128, V_60:U8
0x3714.61: SHR V_60:U8, 0x7:U8, V_61:U8
0x3714.62: AND V_61:U8, 0x1:U8, V_62:U8
0x3714.63: AND V_60:U8, 0xf:U8, V_63:U8
0x3714.64: OR V_63:U8, 0x0:U8, V_64:U16
0x3714.65: SHL V_64:U16, 0x3:U8, V_65:U16
0x3714.66: SHR V_00:U128, V_65:U16, V_66:U128
0x3714.67: OR V_66:U128, 0x0:U128, V_67:U8
0x3714.68: SUB 0x0:U8, V_62:U8, V_68:U8
0x3714.69: NOT V_68:U8, _, V_69:U8
0x3714.70: AND V_67:U8, V_69:U8, V_70:U8
0x3714.71: OR V_70:U8, 0x0:U8, V_71:U128
0x3714.72: SHL V_71:U128, 0x20:U16, V_72:U128
0x3714.73: OR V_58:U128, V_72:U128, V_73:U128
0x3714.74: SHR V_01:U128, 0x28:U16, V_74:U128
0x3714.75: OR V_74:U128, 0x0:U128, V_75:U8
0x3714.76: SHR V_75:U8, 0x7:U8, V_76:U8
0x3714.77: AND V_76:U8, 0x1:U8, V_77:U8
0x3714.78: AND V_75:U8, 0xf:U8, V_78:U8
0x3714.79: OR V_78:U8, 0x0:U8, V_79:U16
0x3714.80: SHL V_79:U16, 0x3:U8, V_80:U16
0x3714.81: SHR V_00:U128, V_80:U16, V_81:U128
0x3714.82: OR V_81:U128, 0x0:U128, V_82:U8
0x3714.83: SUB 0x0:U8, V_77:U8, V_83:U8
0x3714.84: NOT V_83:U8, _, V_84:U8
0x3714.85: AND V_82:U8, V_84:U8, V_85:U8
0x3714.86: OR V_85:U8, 0x0:U8, V_86:U128
0x3714.87: SHL V_86:U128, 0x28:U16, V_87:U128
0x3714.88: OR V_73:U128, V_87:U128, V_88:U128
0x3714.89: SHR V_01:U128, 0x30:U16, V_89:U128
0x3714.90: OR V_89:U128, 0x0:U128, V_90:U8
0x3714.91: SHR V_90:U8, 0x7:U8, V_91:U8
0x3714.92: AND V_91:U8, 0x1:U8, V_92:U8
0x3714.93: AND V_90:U8, 0xf:U8, V_93:U8
0x3714.94: OR V_93:U8, 0x0:U8, V_94:U16
0x3714.95: SHL V_94:U16, 0x3:U8, V_95:U16
0x3714.96: SHR V_00:U128, V_95:U16, V_96:U128
0x3714.97: OR V_96:U128, 0x0:U128, V_97:U8
0x3714.98: SUB 0x0:U8, V_92:U8, V_98:U8
0x3714.99: NOT V_98:U8, _, V_99:U8
0x3714.100: AND V_97:U8, V_99:U8, V_100:U8
0x3714.101: OR V_100:U8, 0x0:U8, V_101:U128
0x3714.102: SHL V_101:U128, 0x30:U16, V_102:U128
0x3714.103: OR V_88:U128, V_102:U128, V_103:U128
0x3714.104: SHR V_01:U128, 0x38:U16, V_104:U128
0x3714.105: OR V_104:U128, 0x0:U128, V_105:U8
0x3714.106: SHR V_105:U8, 0x7:U8, V_106:U8
0x3714.107: AND V_106:U8, 0x1:U8, V_107:U8
0x3714.108: AND V_105:U8, 0xf:U8, V_108:U8
0x3714.109: OR V_108:U8, 0x0:U8, V_109:U16
0x3714.110: SHL V_109:U16, 0x3:U8, V_110:U16
0x3714.111: SHR V_00:U128, V_110:U16, V_111:U128
0x3714.112: OR V_111:U128, 0x0:U128, V_112:U8
0x3714.113: SUB 0x0:U8, V_107:U8, V_113:U8
0x3714.114: NOT V_113:U8, _, V_114:U8
0x3714.115: AND V_112:U8, V_114:U8, V_115:U8
0x3714.116: OR V_115:U8, 0x0:U8, V_116:U128
0x3714.117: SHL V_116:U128, 0x38:U16, V_117:U128
0x3714.118: OR V_103:U128, V_117:U128, V_118:U128
0x3714.119: SHR V_01:U128, 0x40:U16, V_119:U128
0x3714.120: OR V_119:U128, 0x0:U128, V_120:U8
0x3714.121: SHR V_120:U8, 0x7:U8, V_121:U8
0x3714.122: AND V_121:U8, 0x1:U8, V_122:U8
0x3714.123: AND V_120:U8, 0xf:U8, V_123:U8
0x3714.124: OR V_123:U8, 0x0:U8, V_124:U16
0x3714.125: SHL V_124:U16, 0x3:U8, V_125:U16
0x3714.126: SHR V_00:U128, V_125:U16, V_126:U128
0x3714.127: OR V_126:U128, 0x0:U128, V_127:U8
0x3714.128: SUB 0x0:U8, V_122:U8, V_128:U8
0x3714.129: NOT V_128:U8, _, V_129:U8
0x3714.130: AND V_127:U8, V_129:U8, V_130:U8
0x3714.131: OR V_130:U8, 0x0:U8, V_131:U128
0x3714.132: SHL V_131:U128, 0x40:U16, V_132:U128
0x3714.133: OR V_118:U128, V_132:U128, V_133:U128
0x3714.134: SHR V_01:U128, 0x48:U16, V_134:U128
0x3714.135: OR V_134:U128, 0x0:U128, V_135:U8
0x3714.136: SHR V_135:U8, 0x7:U8, V_136:U8
0x3714.137: AND V_136:U8, 0x1:U8, V_137:U8
0x3714.138: AND V_135:U8, 0xf:U8, V_138:U8
0x3714.139: OR V_138:U8, 0x0:U8, V_139:U16
0x3714.140: SHL V_139:U16, 0x3:U8, V_140:U16
0x3714.141: SHR V_00:U128, V_140:U16, V_141:U128
0x3714.142: OR V_141:U128, 0x0:U128, V_142:U8
0x3714.143: SUB 0x0:U8, V_137:U8, V_143:U8
0x3714.144: NOT V_143:U8, _, V_144:U8
0x3714.145: AND V_142:U8, V_144:U8, V_145:U8
0x3714.146: OR V_145:U8, 0x0:U8, V_146:U128
0x3714.147: SHL V_146:U128, 0x48:U16, V_147:U128
0x3714.148: OR V_133:U128, V_147:U128, V_148:U128
0x3714.149: SHR V_01:U128, 0x50:U16, V_149:U128
0x3714.150: OR V_149:U128, 0x0:U128, V_150:U8
0x3714.151: SHR V_150:U8, 0x7:U8, V_151:U8
0x3714.152: AND V_151:U8, 0x1:U8, V_152:U8
0x3714.153: AND V_150:U8, 0xf:U8, V_153:U8
0x3714.154: OR V_153:U8, 0x0:U8, V_154:U16
0x3714.155: SHL V_154:U16, 0x3:U8, V_155:U16
0x3714.156: SHR V_00:U128, V_155:U16, V_156:U128
0x3714.157: OR V_156:U128, 0x0:U128, V_157:U8
0x3714.158: SUB 0x0:U8, V_152:U8, V_158:U8
0x3714.159: NOT V_158:U8, _, V_159:U8
0x3714.160: AND V_157:U8, V_159:U8, V_160:U8
0x3714.161: OR V_160:U8, 0x0:U8, V_161:U128
0x3714.162: SHL V_161:U128, 0x50:U16, V_162:U128
0x3714.163: OR V_148:U128, V_162:U128, V_163:U128
0x3714.164: SHR V_01:U128, 0x58:U16, V_164:U128
0x3714.165: OR V_164:U128, 0x0:U128, V_165:U8
0x3714.166: SHR V_165:U8, 0x7:U8, V_166:U8
0x3714.167: AND V_166:U8, 0x1:U8, V_167:U8
0x3714.168: AND V_165:U8, 0xf:U8, V_168:U8
0x3714.169: OR V_168:U8, 0x0:U8, V_169:U16
0x3714.170: SHL V_169:U16, 0x3:U8, V_170:U16
0x3714.171: SHR V_00:U128, V_170:U16, V_171:U128
0x3714.172: OR V_171:U128, 0x0:U128, V_172:U8
0x3714.173: SUB 0x0:U8, V_167:U8, V_173:U8
0x3714.174: NOT V_173:U8, _, V_174:U8
0x3714.175: AND V_172:U8, V_174:U8, V_175:U8
0x3714.176: OR V_175:U8, 0x0:U8, V_176:U128
0x3714.177: SHL V_176:U128, 0x58:U16, V_177:U128
0x3714.178: OR V_163:U128, V_177:U128, V_178:U128
0x3714.179: SHR V_01:U128, 0x60:U16, V_179:U128
0x3714.180: OR V_179:U128, 0x0:U128, V_180:U8
0x3714.181: SHR V_180:U8, 0x7:U8, V_181:U8
0x3714.182: AND V_181:U8, 0x1:U8, V_182:U8
0x3714.183: AND V_180:U8, 0xf:U8, V_183:U8
0x3714.184: OR V_183:U8, 0x0:U8, V_184:U16
0x3714.185: SHL V_184:U16, 0x3:U8, V_185:U16
0x3714.186: SHR V_00:U128, V_185:U16, V_186:U128
0x3714.187: OR V_186:U128, 0x0:U128, V_187:U8
0x3714.188: SUB 0x0:U8, V_182:U8, V_188:U8
0x3714.189: NOT V_188:U8, _, V_189:U8
0x3714.190: AND V_187:U8, V_189:U8, V_190:U8
0x3714.191: OR V_190:U8, 0x0:U8, V_191:U128
0x3714.192: SHL V_191:U128, 0x60:U16, V_192:U128
0x3714.193: OR V_178:U128, V_192:U128, V_193:U128
0x3714.194: SHR V_01:U128, 0x68:U16, V_194:U128
0x3714.195: OR V_194:U128, 0x0:U128, V_195:U8
0x3714.196: SHR V_195:U8, 0x7:U8, V_196:U8
0x3714.197: AND V_196:U8, 0x1:U8, V_197:U8
0x3714.198: AND V_195:U8, 0xf:U8, V_198:U8
0x3714.199: OR V_198:U8, 0x0:U8, V_199:U16
0x3714.200: SHL V_199:U16, 0x3:U8, V_200:U16
0x3714.201: SHR V_00:U128, V_200:U16, V_201:U128
0x3714.202: OR V_201:U128, 0x0:U128, V_202:U8
0x3714.203: SUB 0x0:U8, V_197:U8, V_203:U8
0x3714.204: NOT V_203:U8, _, V_204:U8
0x3714.205: AND V_202:U8, V_204:U8, V_205:U8
0x3714.206: OR V_205:U8, 0x0:U8, V_206:U128
0x3714.207: SHL V_206:U128, 0x68:U16, V_207:U128
0x3714.208: OR V_193:U128, V_207:U128, V_208:U128
0x3714.209: SHR V_01:U128, 0x70:U16, V_209:U128
0x3714.210: OR V_209:U128, 0x0:U128, V_210:U8
0x3714.211: SHR V_210:U8, 0x7:U8, V_211:U8
0x3714.212: AND V_211:U8, 0x1:U8, V_212:U8
0x3714.213: AND V_210:U8, 0xf:U8, V_213:U8
0x3714.214: OR V_213:U8, 0x0:U8, V_214:U16
0x3714.215: SHL V_214:U16, 0x3:U8, V_215:U16
0x3714.216: SHR V_00:U128, V_215:U16, V_216:U128
0x3714.217: OR V_216:U128, 0x0:U128, V_217:U8
0x3714.218: SUB 0x0:U8, V_212:U8, V_218:U8
0x3714.219: NOT V_218:U8, _, V_219:U8
0x3714.220: AND V_217:U8, V_219:U8, V_220:U8
0x3714.221: OR V_220:U8, 0x0:U8, V_221:U128
0x3714.222: SHL V_221:U128, 0x70:U16, V_222:U128
0x3714.223: OR V_208:U128, V_222:U128, V_223:U128
0x3714.224: SHR V_01:U128, 0x78:U16, V_224:U128
0x3714.225: OR V_224:U128, 0x0:U128, V_225:U8
0x3714.226: SHR V_225:U8, 0x7:U8, V_226:U8
0x3714.227: AND V_226:U8, 0x1:U8, V_227:U8
0x3714.228: AND V_225:U8, 0xf:U8, V_228:U8
0x3714.229: OR V_228:U8, 0x0:U8, V_229:U16
0x3714.230: SHL V_229:U16, 0x3:U8, V_230:U16
0x3714.231: SHR V_00:U128, V_230:U16, V_231:U128
0x3714.232: OR V_231:U128, 0x0:U128, V_232:U8
0x3714.233: SUB 0x0:U8, V_227:U8, V_233:U8
0x3714.234: NOT V_233:U8, _, V_234:U8
0x3714.235: AND V_232:U8, V_234:U8, V_235:U8
0x3714.236: OR V_235:U8, 0x0:U8, V_236:U128
0x3714.237: SHL V_236:U128, 0x78:U16, V_237:U128
0x3714.238: OR V_223:U128, V_237:U128, V_238:U128
0x3714.239: OR V_238:U128, 0x0:U512, V_239:U128
0x3714.240: OR V_239:U128, 0x0:U128, R_ZMM0:U512 ASM_END
; 0x3719: vpternlogq xmm0, xmm4, xmmword ptr [rbp + rax - 0x120], 0x96 (9 bytes)
0x3719.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x3719.1: OR R_ZMM4:U512, 0x0:U512, V_01:U128
0x3719.2: ADD R_RBP:U64, R_RAX:U64, V_02:U64
0x3719.3: ADD V_02:U64, 0xfffffffffffffee0:U64, V_03:U64
0x3719.4: LDM V_03:U64, _, V_04:U128
0x3719.5: NOT V_00:U128, _, V_05:U128
0x3719.6: NOT V_01:U128, _, V_06:U128
0x3719.7: NOT V_04:U128, _, V_07:U128
0x3719.8: AND V_05:U128, V_06:U128, V_08:U128
0x3719.9: AND V_08:U128, V_04:U128, V_09:U128
0x3719.10: AND V_05:U128, V_01:U128, V_10:U128
0x3719.11: AND V_10:U128, V_07:U128, V_11:U128
0x3719.12: OR V_09:U128, V_11:U128, V_12:U128
0x3719.13: AND V_00:U128, V_06:U128, V_13:U128
0x3719.14: AND V_13:U128, V_07:U128, V_14:U128
0x3719.15: OR V_12:U128, V_14:U128, V_15:U128
0x3719.16: AND V_00:U128, V_01:U128, V_16:U128
0x3719.17: AND V_16:U128, V_04:U128, V_17:U128
0x3719.18: OR V_15:U128, V_17:U128, V_18:U128
0x3719.19: OR V_18:U128, 0x0:U512, V_19:U128
0x3719.20: OR V_19:U128, 0x0:U128, R_ZMM0:U512 ASM_END
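
The imm8 value 0x96 on vpternlogq is the truth table of a three-input XOR, so this instruction computes xmm0 ^ xmm4 ^ the 16 bytes loaded from rbp + rax - 0x120 (presumably the current round key in the expanded key schedule on the stack) in one step. The translator expands the truth table into its four true minterms, which is why the IR above is an OR of four three-way AND products over inverted and non-inverted inputs rather than a pair of XORs. A quick self-contained check that 0x96, a ^ b ^ c, and the four-minterm form all agree (bit-level model only, nothing OpenREIL-specific):

    def ternlog_bit(a, b, c, imm8):
        # each result bit is bit (a:b:c) of the immediate truth table
        return (imm8 >> ((a << 2) | (b << 1) | c)) & 1

    def minterms_0x96(a, b, c):
        # the four products emitted by the IR above
        return ((~a & ~b &  c) | (~a &  b & ~c) |
                ( a & ~b & ~c) | ( a &  b &  c)) & 1

    for a in (0, 1):
        for b in (0, 1):
            for c in (0, 1):
                assert ternlog_bit(a, b, c, 0x96) == (a ^ b ^ c) == minterms_0x96(a, b, c)
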
; 0x3722: add rax, 0x10 (4 bytes)
0x3722.0: ADD R_RAX:U64, 0x10:U64, V_00:U64
0x3722.26: STR V_00:U64, _, R_RAX:U64 ASM_END
; 0x3726: cmp rax, 0xa0 (6 bytes)
0x3726.0: SUB R_RAX:U64, 0xa0:U64, V_00:U64
0x3726.2: EQ V_00:U64, 0x0:U64, R_ZF:U1
; 0x372c: jne 0x3370 (6 bytes)
0x372c.0: XOR R_ZF:U1, 0x1:U1, V_00:U1
0x372c.1: JCC V_00:U1, _, 0x3370:U64 ASM_END BB_END
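
The last three instructions of the block form the round-loop back-edge: rax steps through the expanded key in 16-byte increments (it appears to enter the loop at 0x10, after round key 0 has already been folded into the state), add rax, 0x10 advances it, and cmp/jne repeats the body at 0x3370 until rax reaches 0xa0, i.e. nine iterations covering AES-128 rounds 1 through 9. Note also the sub-indices: the surviving IR jumps from 0x3722.0 straight to 0x3722.26 and the cmp keeps only its ZF computation, because flag updates that nothing consumes were eliminated while the original per-instruction numbering is preserved. A tiny sanity check of the trip count (pure Python, nothing taken from the trace):

    visited = []
    rax = 0x10                       # value on first entry to the loop body
    while True:
        visited.append(rax // 0x10)  # round key index read at 0x3719
        rax += 0x10                  # add rax, 0x10
        if rax == 0xa0:              # cmp rax, 0xa0 / jne 0x3370
            break
    assert visited == list(range(1, 10))   # rounds 1..9
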
BB 0x3732 (RET)
; 0x3732: vpextrb eax, xmm0, 0 (6 bytes)
0x3732.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x3732.1: OR V_00:U128, 0x0:U128, V_01:U8
0x3732.2: OR V_01:U8, 0x0:U8, V_02:U32
0x3732.3: OR V_02:U32, 0x0:U64, V_03:U32
0x3732.4: OR V_03:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x3738: lea rcx, [rip + 0x651] (7 bytes)
0x3738.0: STR 0x3d90:U64, _, R_RCX:U64 ASM_END
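
The lea's RIP-relative operand is folded into an absolute constant by the translator: the instruction at 0x3738 is 7 bytes long, so RIP at the next instruction is 0x373f, and 0x373f + 0x651 = 0x3d90, the _sbox table that the following movzx indexes. One-line check:

    assert 0x3738 + 7 + 0x651 == 0x3d90   # rip + 0x651 resolves to the _sbox address
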
; 0x373f: movzx eax, byte ptr [rax + rcx] (4 bytes)
0x373f.0: ADD R_RAX:U64, 0x3d90:U64, V_00:U64
0x373f.1: LDM V_00:U64, _, V_01:U8
0x373f.2: OR V_01:U8, 0x0:U8, V_02:U32
0x373f.3: OR V_02:U32, 0x0:U64, V_03:U32
0x373f.4: OR V_03:U32, 0x0:U32, R_RAX:U64 ASM_END
; 0x3743: vmovd xmm1, eax (4 bytes)
0x3743.0: AND R_RAX:U64, 0xffffffff:U64, V_00:U32
0x3743.1: OR V_00:U32, 0x0:U32, V_01:U32
0x3743.2: OR V_01:U32, 0x0:U32, V_02:U128
0x3743.3: OR V_02:U128, 0x0:U512, V_03:U128
0x3743.4: OR V_03:U128, 0x0:U128, R_ZMM1:U512 ASM_END
; 0x380a: vpxor xmm0, xmm0, xmmword ptr [rbp - 0x80] (5 bytes)
0x380a.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x380a.1: ADD R_RBP:U64, 0xffffffffffffff80:U64, V_01:U64
0x380a.2: LDM V_01:U64, _, V_02:U128
0x380a.3: XOR V_00:U128, V_02:U128, V_03:U128
0x380a.4: OR V_03:U128, 0x0:U512, V_04:U128
0x380a.5: OR V_04:U128, 0x0:U128, R_ZMM0:U512 ASM_END
; 0x380f: mov rax, qword ptr [rbp - 0x58] (4 bytes)
0x380f.0: ADD R_RBP:U64, 0xffffffffffffffa8:U64, V_00:U64
0x380f.1: LDM V_00:U64, _, V_01:U64
0x380f.2: STR V_01:U64, _, R_RAX:U64 ASM_END
; 0x3813: vmovdqu xmmword ptr [rax], xmm0 (4 bytes)
0x3813.0: OR R_ZMM0:U512, 0x0:U512, V_00:U128
0x3813.1: STM V_00:U128, _, R_RAX:U64 ASM_END
; 0x3817: add rsp, 0xf8 (7 bytes)
0x3817.0: ADD R_RSP:U64, 0xf8:U64, V_00:U64
0x3817.26: STR V_00:U64, _, R_RSP:U64 ASM_END
; 0x381e: pop rbx (1 bytes)
0x381e.0: LDM R_RSP:U64, _, V_00:U64
0x381e.1: STR V_00:U64, _, R_RBX:U64
0x381e.2: ADD R_RSP:U64, 0x8:U64, V_01:U64
0x381e.3: STR V_01:U64, _, R_RSP:U64 ASM_END
; 0x381f: pop r12 (2 bytes)
0x381f.0: LDM R_RSP:U64, _, V_00:U64
0x381f.1: STR V_00:U64, _, R_R12:U64
0x381f.2: ADD R_RSP:U64, 0x8:U64, V_01:U64
0x381f.3: STR V_01:U64, _, R_RSP:U64 ASM_END
; 0x3821: pop r13 (2 bytes)
0x3821.0: LDM R_RSP:U64, _, V_00:U64
0x3821.1: STR V_00:U64, _, R_R13:U64
0x3821.2: ADD R_RSP:U64, 0x8:U64, V_01:U64
0x3821.3: STR V_01:U64, _, R_RSP:U64 ASM_END
; 0x3823: pop r14 (2 bytes)
0x3823.0: LDM R_RSP:U64, _, V_00:U64
0x3823.1: STR V_00:U64, _, R_R14:U64
0x3823.2: ADD R_RSP:U64, 0x8:U64, V_01:U64
0x3823.3: STR V_01:U64, _, R_RSP:U64 ASM_END
; 0x3825: pop r15 (2 bytes)
0x3825.0: LDM R_RSP:U64, _, V_00:U64
0x3825.1: STR V_00:U64, _, R_R15:U64
0x3825.2: ADD R_RSP:U64, 0x8:U64, V_01:U64
0x3825.3: STR V_01:U64, _, R_RSP:U64 ASM_END
; 0x3827: pop rbp (1 bytes)
0x3827.0: LDM R_RSP:U64, _, V_00:U64
0x3827.1: STR V_00:U64, _, R_RBP:U64
0x3827.2: ADD R_RSP:U64, 0x8:U64, V_01:U64
0x3827.3: STR V_01:U64, _, R_RSP:U64 ASM_END
; 0x3828: ret (1 bytes)
0x3828.0: LDM R_RSP:U64, _, V_00:U64
0x3828.1: ADD R_RSP:U64, 0x8:U64, V_01:U64
0x3828.2: STR V_01:U64, _, R_RSP:U64
0x3828.3: JCC 0x1:U1, _, V_00:U64 ASM_END BB_END RET
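
The epilogue shows the fixed pattern used for stack pops and the return: every pop is an LDM through R_RSP, a STR into the destination register, and an 8-byte RSP adjustment, while ret is the same load/adjust pair followed by an unconditional JCC (condition fixed at 1) to the loaded address. A toy dict-backed model of those two shapes (helper names and sample values are mine, not from the trace):

    def pop(regs, mem, dst):
        regs[dst] = mem[regs["rsp"]]   # LDM R_RSP, _, V_00 / STR V_00, _, dst
        regs["rsp"] += 8               # ADD R_RSP, 0x8 / STR -> R_RSP

    def ret(regs, mem):
        target = mem[regs["rsp"]]      # LDM R_RSP, _, V_00
        regs["rsp"] += 8               # ADD R_RSP, 0x8 / STR -> R_RSP
        return target                  # JCC 0x1, _, V_00 (always taken)

    regs = {"rsp": 0x7fffff00, "rbx": 0}
    mem  = {0x7fffff00: 0x1111, 0x7fffff08: 0x401000}
    pop(regs, mem, "rbx")
    assert regs["rbx"] == 0x1111 and ret(regs, mem) == 0x401000
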