# Mirror of https://github.com/hedge-dev/XenonRecomp.git (synced 2025-06-06 18:31:03 +00:00)
# Capstone test fixture (hex bytes = expected disassembly); .cs extension is not C#.
# CS_ARCH_ARM, CS_MODE_ARM, None
0xb0,0x01,0x41,0xf2 = vand d16, d17, d16
0xf2,0x01,0x40,0xf2 = vand q8, q8, q9
0xb0,0x01,0x41,0xf3 = veor d16, d17, d16
0xf2,0x01,0x40,0xf3 = veor q8, q8, q9
0xb0,0x01,0x61,0xf2 = vorr d16, d17, d16
0xf2,0x01,0x60,0xf2 = vorr q8, q8, q9
0x11,0x07,0xc0,0xf2 = vorr.i32 d16, #0x1000000
0x51,0x07,0xc0,0xf2 = vorr.i32 q8, #0x1000000
0x50,0x01,0xc0,0xf2 = vorr.i32 q8, #0x0
0xb0,0x01,0x51,0xf2 = vbic d16, d17, d16
0xf2,0x01,0x50,0xf2 = vbic q8, q8, q9
0xf6,0x41,0x54,0xf2 = vbic q10, q10, q11
0x11,0x91,0x19,0xf2 = vbic d9, d9, d1
0x3f,0x0b,0xc7,0xf3 = vbic.i16 d16, #0xff00
0x7f,0x0b,0xc7,0xf3 = vbic.i16 q8, #0xff00
0x3f,0x09,0xc7,0xf3 = vbic.i16 d16, #0xff
0x7f,0x09,0xc7,0xf3 = vbic.i16 q8, #0xff
0x3f,0x07,0xc7,0xf3 = vbic.i32 d16, #0xff000000
0x7f,0x07,0xc7,0xf3 = vbic.i32 q8, #0xff000000
0x3f,0x05,0xc7,0xf3 = vbic.i32 d16, #0xff0000
0x7f,0x05,0xc7,0xf3 = vbic.i32 q8, #0xff0000
0x3f,0x03,0xc7,0xf3 = vbic.i32 d16, #0xff00
0x7f,0x03,0xc7,0xf3 = vbic.i32 q8, #0xff00
0x3f,0x01,0xc7,0xf3 = vbic.i32 d16, #0xff
0x7f,0x01,0xc7,0xf3 = vbic.i32 q8, #0xff
0x3c,0xa9,0x87,0xf3 = vbic.i16 d10, #0xfc
0x7c,0x49,0xc7,0xf3 = vbic.i16 q10, #0xfc
0x3c,0xab,0x87,0xf3 = vbic.i16 d10, #0xfc00
0x7c,0x4b,0xc7,0xf3 = vbic.i16 q10, #0xfc00
0x3c,0xa7,0x87,0xf3 = vbic.i32 d10, #0xfc000000
0x7c,0x47,0xc7,0xf3 = vbic.i32 q10, #0xfc000000
0x3c,0xa5,0x87,0xf3 = vbic.i32 d10, #0xfc0000
0x7c,0x45,0xc7,0xf3 = vbic.i32 q10, #0xfc0000
0x3c,0xa3,0x87,0xf3 = vbic.i32 d10, #0xfc00
0x7c,0x43,0xc7,0xf3 = vbic.i32 q10, #0xfc00
0x3c,0xa1,0x87,0xf3 = vbic.i32 d10, #0xfc
0x7c,0x41,0xc7,0xf3 = vbic.i32 q10, #0xfc
0xb0,0x01,0x71,0xf2 = vorn d16, d17, d16
0xf2,0x01,0x70,0xf2 = vorn q8, q8, q9
0xa0,0x05,0xf0,0xf3 = vmvn d16, d16
0xe0,0x05,0xf0,0xf3 = vmvn q8, q8
0xb0,0x21,0x51,0xf3 = vbsl d18, d17, d16
0xf2,0x01,0x54,0xf3 = vbsl q8, q10, q9
0xb0,0x21,0x61,0xf3 = vbit d18, d17, d16
0xf2,0x01,0x64,0xf3 = vbit q8, q10, q9
0xb0,0x21,0x71,0xf3 = vbif d18, d17, d16
0xf2,0x01,0x74,0xf3 = vbif q8, q10, q9
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x56,0x81,0x0e,0xf3 = veor q4, q7, q3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x07,0xf2 = vand d4, d7, d3
0x13,0x41,0x27,0xf2 = vorr d4, d7, d3
0x13,0x41,0x27,0xf2 = vorr d4, d7, d3
0x13,0x41,0x27,0xf2 = vorr d4, d7, d3
0x13,0x41,0x27,0xf2 = vorr d4, d7, d3
0x13,0x41,0x27,0xf2 = vorr d4, d7, d3
0x13,0x41,0x27,0xf2 = vorr d4, d7, d3
0x13,0x41,0x27,0xf2 = vorr d4, d7, d3
0x13,0x41,0x27,0xf2 = vorr d4, d7, d3
0x13,0x41,0x27,0xf2 = vorr d4, d7, d3
0x13,0x41,0x27,0xf2 = vorr d4, d7, d3
0x13,0x41,0x27,0xf2 = vorr d4, d7, d3
0x56,0x81,0x2e,0xf2 = vorr q4, q7, q3
0x56,0x81,0x2e,0xf2 = vorr q4, q7, q3
0x56,0x81,0x2e,0xf2 = vorr q4, q7, q3
0x56,0x81,0x2e,0xf2 = vorr q4, q7, q3
0x56,0x81,0x2e,0xf2 = vorr q4, q7, q3
0x56,0x81,0x2e,0xf2 = vorr q4, q7, q3
0x56,0x81,0x2e,0xf2 = vorr q4, q7, q3
0x56,0x81,0x2e,0xf2 = vorr q4, q7, q3
0x56,0x81,0x2e,0xf2 = vorr q4, q7, q3
0x56,0x81,0x2e,0xf2 = vorr q4, q7, q3
0x56,0x81,0x2e,0xf2 = vorr q4, q7, q3
0x56,0x81,0x2e,0xf2 = vorr q4, q7, q3
0x5a,0xc1,0x0c,0xf2 = vand q6, q6, q5
0x5a,0xc1,0x0c,0xf2 = vand q6, q6, q5
0x52,0xe1,0x0e,0xf2 = vand q7, q7, q1
0xd4,0x01,0x40,0xf2 = vand q8, q8, q2
0xd4,0x01,0x40,0xf2 = vand q8, q8, q2
0x5a,0xc1,0x0c,0xf3 = veor q6, q6, q5
0x5a,0xc1,0x0c,0xf3 = veor q6, q6, q5
0x52,0xe1,0x0e,0xf3 = veor q7, q7, q1
0xd4,0x01,0x40,0xf3 = veor q8, q8, q2
0xd4,0x01,0x40,0xf3 = veor q8, q8, q2
0x5a,0xc1,0x0c,0xf3 = veor q6, q6, q5
0x5a,0xc1,0x0c,0xf3 = veor q6, q6, q5
0x52,0xe1,0x0e,0xf3 = veor q7, q7, q1
0xd4,0x01,0x40,0xf3 = veor q8, q8, q2
0xd4,0x01,0x40,0xf3 = veor q8, q8, q2
0x4a,0xa2,0xb5,0xf3 = vclt.s16 q5, q5, #0
0x05,0x52,0xb5,0xf3 = vclt.s16 d5, d5, #0
0x56,0xa8,0x1a,0xf3 = vceq.i16 q5, q5, q3
0x13,0x58,0x15,0xf3 = vceq.i16 d5, d5, d3
0x46,0xa3,0x1a,0xf2 = vcgt.s16 q5, q5, q3
0x03,0x53,0x15,0xf2 = vcgt.s16 d5, d5, d3
0x56,0xa3,0x1a,0xf2 = vcge.s16 q5, q5, q3
0x13,0x53,0x15,0xf2 = vcge.s16 d5, d5, d3
0x4a,0xa0,0xb5,0xf3 = vcgt.s16 q5, q5, #0
0x05,0x50,0xb5,0xf3 = vcgt.s16 d5, d5, #0
0xca,0xa0,0xb5,0xf3 = vcge.s16 q5, q5, #0
0x85,0x50,0xb5,0xf3 = vcge.s16 d5, d5, #0
0x4a,0xa1,0xb5,0xf3 = vceq.i16 q5, q5, #0
0x05,0x51,0xb5,0xf3 = vceq.i16 d5, d5, #0
0xca,0xa1,0xb5,0xf3 = vcle.s16 q5, q5, #0
0x85,0x51,0xb5,0xf3 = vcle.s16 d5, d5, #0
0x3e,0x5e,0x05,0xf3 = vacge.f32 d5, d5, d30
0x56,0xae,0x0a,0xf3 = vacge.f32 q5, q5, q3
0x3e,0x5e,0x25,0xf3 = vacgt.f32 d5, d5, d30
0x56,0xae,0x2a,0xf3 = vacgt.f32 q5, q5, q3