Message-ID: <55E4A3E9.8020707@intel.com>
Date: Mon, 31 Aug 2015 21:58:49 +0300
From: Adrian Hunter <adrian.hunter@...el.com>
To: Arnaldo Carvalho de Melo <acme@...nel.org>
Cc: linux-kernel@...r.kernel.org, Jiri Olsa <jolsa@...hat.com>,
Andy Lutomirski <luto@...capital.net>,
Masami Hiramatsu <masami.hiramatsu.pt@...achi.com>,
Denys Vlasenko <dvlasenk@...hat.com>,
Peter Zijlstra <peterz@...radead.org>,
Ingo Molnar <mingo@...nel.org>,
Dave Hansen <dave.hansen@...ux.intel.com>,
Qiaowei Ren <qiaowei.ren@...el.com>,
"H. Peter Anvin" <hpa@...or.com>,
Thomas Gleixner <tglx@...utronix.de>
Subject: Re: [PATCH 3/4] x86/insn: perf tools: Add new SHA instructions
On 31/08/2015 5:50 p.m., Arnaldo Carvalho de Melo wrote:
> Em Mon, Aug 31, 2015 at 04:58:41PM +0300, Adrian Hunter escreveu:
>> Intel SHA Extensions are explained in the Intel Architecture
>> Instruction Set Extensions Programming Reference (Oct 2014).
>> There are 7 new instructions. Add them to the opcode map
>> and to the perf tools new instructions test, e.g.:
>>
>> $ tools/perf/perf test list 2>&1 | grep "x86 ins"
>
> I.e., one could short-circuit the 'perf test list' step and use:
>
> perf test "x86 ins" straight away:
>
> [root@zoo linux]# perf test "syscall event"
> 2: detect openat syscall event : Ok
> 3: detect openat syscall event on all cpus : Ok
> [root@zoo linux]#
Cool, I'll update the commit messages in V2.
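
BTW, for anyone skimming the test data below: each entry should follow
the test_data layout from the insn-x86 test added earlier in this
series. A minimal sketch of that layout (field names and sizes here are
illustrative, not copied from the tree):

	struct test_data {
		unsigned char data[15];  /* instruction bytes, up to MAX_INSN_SIZE */
		int expected_length;     /* decoded instruction length */
		int expected_rel;        /* expected branch displacement, if any */
		const char *op;          /* e.g. "jcc"; empty for the SHA insns */
		const char *branch;      /* e.g. "conditional"; empty here */
		const char *asm_rep;     /* objdump line the bytes came from */
	};

So an entry like {{0xf2, 0x0f, 0x85, ...}, 7, 0xfffffffc, "jcc",
"conditional", ...} reads as: 7 bytes long, rel 0xfffffffc, a
conditional branch.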
>
>> 39: Test x86 instruction decoder - new instructions
>> $ tools/perf/perf test 39
>> 39: Test x86 instruction decoder - new instructions : Ok
>>
>> Or to see the details:
>>
>> $ tools/perf/perf test -v 39 2>&1 | grep sha
>>
>> Signed-off-by: Adrian Hunter <adrian.hunter@...el.com>
>> ---
>> arch/x86/lib/x86-opcode-map.txt | 7 +
>> tools/perf/tests/insn-x86-dat-32.c | 294 ++++++++++++++++
>> tools/perf/tests/insn-x86-dat-64.c | 364 ++++++++++++++++++++
>> tools/perf/tests/insn-x86-dat-src.c | 373 +++++++++++++++++++++
>> .../perf/util/intel-pt-decoder/x86-opcode-map.txt | 7 +
>> 5 files changed, 1045 insertions(+)
>>
>> diff --git a/arch/x86/lib/x86-opcode-map.txt b/arch/x86/lib/x86-opcode-map.txt
>> index a02a195d219c..25dad388b371 100644
>> --- a/arch/x86/lib/x86-opcode-map.txt
>> +++ b/arch/x86/lib/x86-opcode-map.txt
>> @@ -736,6 +736,12 @@ bd: vfnmadd231ss/d Vx,Hx,Wx (66),(v),(v1)
>> be: vfnmsub231ps/d Vx,Hx,Wx (66),(v)
>> bf: vfnmsub231ss/d Vx,Hx,Wx (66),(v),(v1)
>> # 0x0f 0x38 0xc0-0xff
>> +c8: sha1nexte Vdq,Wdq
>> +c9: sha1msg1 Vdq,Wdq
>> +ca: sha1msg2 Vdq,Wdq
>> +cb: sha256rnds2 Vdq,Wdq
>> +cc: sha256msg1 Vdq,Wdq
>> +cd: sha256msg2 Vdq,Wdq
>> db: VAESIMC Vdq,Wdq (66),(v1)
>> dc: VAESENC Vdq,Hdq,Wdq (66),(v1)
>> dd: VAESENCLAST Vdq,Hdq,Wdq (66),(v1)
>> @@ -794,6 +800,7 @@ AVXcode: 3
>> 61: vpcmpestri Vdq,Wdq,Ib (66),(v1)
>> 62: vpcmpistrm Vdq,Wdq,Ib (66),(v1)
>> 63: vpcmpistri Vdq,Wdq,Ib (66),(v1)
>> +cc: sha1rnds4 Vdq,Wdq,Ib
>> df: VAESKEYGEN Vdq,Wdq,Ib (66),(v1)
>> f0: RORX Gy,Ey,Ib (F2),(v)
>> EndTable
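
For reviewers unfamiliar with the map notation: Vdq is the XMM register
selected by ModRM.reg, Wdq is the XMM register or m128 selected by
ModRM.rm, and Ib is an immediate byte. As a minimal sketch (not part of
the patch), hand-decoding the first 32-bit test vector below:

	#include <stdio.h>

	int main(void)
	{
		/* First test vector: 0f 3a cc c1 00
		 * = sha1rnds4 $0x0,%xmm1,%xmm0 */
		unsigned char insn[] = { 0x0f, 0x3a, 0xcc, 0xc1, 0x00 };
		int mod = insn[3] >> 6;        /* 3 = register-direct form */
		int reg = (insn[3] >> 3) & 7;  /* 0 = %xmm0, dest (Vdq) */
		int rm  = insn[3] & 7;         /* 1 = %xmm1, src (Wdq) */
		int imm = insn[4];             /* 0x0, rounds selector (Ib) */

		printf("mod=%d reg=%d rm=%d imm=%d\n", mod, reg, rm, imm);
		return 0;
	}

When mod is not 3, the rm field (plus any SIB byte) names a memory
operand instead, which is what the addressing-mode variants in the test
data exercise.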
>> diff --git a/tools/perf/tests/insn-x86-dat-32.c b/tools/perf/tests/insn-x86-dat-32.c
>> index 6a38a34a5a49..83f5078e74e1 100644
>> --- a/tools/perf/tests/insn-x86-dat-32.c
>> +++ b/tools/perf/tests/insn-x86-dat-32.c
>> @@ -322,3 +322,297 @@
>> "f2 ff 21 \tbnd jmp *(%ecx)",},
>> {{0xf2, 0x0f, 0x85, 0xfc, 0xff, 0xff, 0xff, }, 7, 0xfffffffc, "jcc", "conditional",
>> "f2 0f 85 fc ff ff ff \tbnd jne 3de <main+0x3de>",},
>> +{{0x0f, 0x3a, 0xcc, 0xc1, 0x00, }, 5, 0, "", "",
>> +"0f 3a cc c1 00 \tsha1rnds4 $0x0,%xmm1,%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0xd7, 0x91, }, 5, 0, "", "",
>> +"0f 3a cc d7 91 \tsha1rnds4 $0x91,%xmm7,%xmm2",},
>> +{{0x0f, 0x3a, 0xcc, 0x00, 0x91, }, 5, 0, "", "",
>> +"0f 3a cc 00 91 \tsha1rnds4 $0x91,(%eax),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x05, 0x78, 0x56, 0x34, 0x12, 0x91, }, 9, 0, "", "",
>> +"0f 3a cc 05 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678,%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x18, 0x91, }, 5, 0, "", "",
>> +"0f 3a cc 18 91 \tsha1rnds4 $0x91,(%eax),%xmm3",},
>> +{{0x0f, 0x3a, 0xcc, 0x04, 0x01, 0x91, }, 6, 0, "", "",
>> +"0f 3a cc 04 01 91 \tsha1rnds4 $0x91,(%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x04, 0x05, 0x78, 0x56, 0x34, 0x12, 0x91, }, 10, 0, "", "",
>> +"0f 3a cc 04 05 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(,%eax,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x04, 0x08, 0x91, }, 6, 0, "", "",
>> +"0f 3a cc 04 08 91 \tsha1rnds4 $0x91,(%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x04, 0xc8, 0x91, }, 6, 0, "", "",
>> +"0f 3a cc 04 c8 91 \tsha1rnds4 $0x91,(%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x40, 0x12, 0x91, }, 6, 0, "", "",
>> +"0f 3a cc 40 12 91 \tsha1rnds4 $0x91,0x12(%eax),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x45, 0x12, 0x91, }, 6, 0, "", "",
>> +"0f 3a cc 45 12 91 \tsha1rnds4 $0x91,0x12(%ebp),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x44, 0x01, 0x12, 0x91, }, 7, 0, "", "",
>> +"0f 3a cc 44 01 12 91 \tsha1rnds4 $0x91,0x12(%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x44, 0x05, 0x12, 0x91, }, 7, 0, "", "",
>> +"0f 3a cc 44 05 12 91 \tsha1rnds4 $0x91,0x12(%ebp,%eax,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x44, 0x08, 0x12, 0x91, }, 7, 0, "", "",
>> +"0f 3a cc 44 08 12 91 \tsha1rnds4 $0x91,0x12(%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x44, 0xc8, 0x12, 0x91, }, 7, 0, "", "",
>> +"0f 3a cc 44 c8 12 91 \tsha1rnds4 $0x91,0x12(%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x80, 0x78, 0x56, 0x34, 0x12, 0x91, }, 9, 0, "", "",
>> +"0f 3a cc 80 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(%eax),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x85, 0x78, 0x56, 0x34, 0x12, 0x91, }, 9, 0, "", "",
>> +"0f 3a cc 85 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(%ebp),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x84, 0x01, 0x78, 0x56, 0x34, 0x12, 0x91, }, 10, 0, "", "",
>> +"0f 3a cc 84 01 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x84, 0x05, 0x78, 0x56, 0x34, 0x12, 0x91, }, 10, 0, "", "",
>> +"0f 3a cc 84 05 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(%ebp,%eax,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, 0x91, }, 10, 0, "", "",
>> +"0f 3a cc 84 08 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x84, 0xc8, 0x78, 0x56, 0x34, 0x12, 0x91, }, 10, 0, "", "",
>> +"0f 3a cc 84 c8 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0xc1, }, 4, 0, "", "",
>> +"0f 38 c8 c1 \tsha1nexte %xmm1,%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0xd7, }, 4, 0, "", "",
>> +"0f 38 c8 d7 \tsha1nexte %xmm7,%xmm2",},
>> +{{0x0f, 0x38, 0xc8, 0x00, }, 4, 0, "", "",
>> +"0f 38 c8 00 \tsha1nexte (%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x05, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 c8 05 78 56 34 12 \tsha1nexte 0x12345678,%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x18, }, 4, 0, "", "",
>> +"0f 38 c8 18 \tsha1nexte (%eax),%xmm3",},
>> +{{0x0f, 0x38, 0xc8, 0x04, 0x01, }, 5, 0, "", "",
>> +"0f 38 c8 04 01 \tsha1nexte (%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x04, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c8 04 05 78 56 34 12 \tsha1nexte 0x12345678(,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x04, 0x08, }, 5, 0, "", "",
>> +"0f 38 c8 04 08 \tsha1nexte (%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x04, 0xc8, }, 5, 0, "", "",
>> +"0f 38 c8 04 c8 \tsha1nexte (%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x40, 0x12, }, 5, 0, "", "",
>> +"0f 38 c8 40 12 \tsha1nexte 0x12(%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x45, 0x12, }, 5, 0, "", "",
>> +"0f 38 c8 45 12 \tsha1nexte 0x12(%ebp),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x44, 0x01, 0x12, }, 6, 0, "", "",
>> +"0f 38 c8 44 01 12 \tsha1nexte 0x12(%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x44, 0x05, 0x12, }, 6, 0, "", "",
>> +"0f 38 c8 44 05 12 \tsha1nexte 0x12(%ebp,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x44, 0x08, 0x12, }, 6, 0, "", "",
>> +"0f 38 c8 44 08 12 \tsha1nexte 0x12(%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x44, 0xc8, 0x12, }, 6, 0, "", "",
>> +"0f 38 c8 44 c8 12 \tsha1nexte 0x12(%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x80, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 c8 80 78 56 34 12 \tsha1nexte 0x12345678(%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x85, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 c8 85 78 56 34 12 \tsha1nexte 0x12345678(%ebp),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x84, 0x01, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c8 84 01 78 56 34 12 \tsha1nexte 0x12345678(%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x84, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c8 84 05 78 56 34 12 \tsha1nexte 0x12345678(%ebp,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c8 84 08 78 56 34 12 \tsha1nexte 0x12345678(%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x84, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c8 84 c8 78 56 34 12 \tsha1nexte 0x12345678(%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0xc1, }, 4, 0, "", "",
>> +"0f 38 c9 c1 \tsha1msg1 %xmm1,%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0xd7, }, 4, 0, "", "",
>> +"0f 38 c9 d7 \tsha1msg1 %xmm7,%xmm2",},
>> +{{0x0f, 0x38, 0xc9, 0x00, }, 4, 0, "", "",
>> +"0f 38 c9 00 \tsha1msg1 (%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x05, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 c9 05 78 56 34 12 \tsha1msg1 0x12345678,%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x18, }, 4, 0, "", "",
>> +"0f 38 c9 18 \tsha1msg1 (%eax),%xmm3",},
>> +{{0x0f, 0x38, 0xc9, 0x04, 0x01, }, 5, 0, "", "",
>> +"0f 38 c9 04 01 \tsha1msg1 (%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x04, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c9 04 05 78 56 34 12 \tsha1msg1 0x12345678(,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x04, 0x08, }, 5, 0, "", "",
>> +"0f 38 c9 04 08 \tsha1msg1 (%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x04, 0xc8, }, 5, 0, "", "",
>> +"0f 38 c9 04 c8 \tsha1msg1 (%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x40, 0x12, }, 5, 0, "", "",
>> +"0f 38 c9 40 12 \tsha1msg1 0x12(%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x45, 0x12, }, 5, 0, "", "",
>> +"0f 38 c9 45 12 \tsha1msg1 0x12(%ebp),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x44, 0x01, 0x12, }, 6, 0, "", "",
>> +"0f 38 c9 44 01 12 \tsha1msg1 0x12(%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x44, 0x05, 0x12, }, 6, 0, "", "",
>> +"0f 38 c9 44 05 12 \tsha1msg1 0x12(%ebp,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x44, 0x08, 0x12, }, 6, 0, "", "",
>> +"0f 38 c9 44 08 12 \tsha1msg1 0x12(%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x44, 0xc8, 0x12, }, 6, 0, "", "",
>> +"0f 38 c9 44 c8 12 \tsha1msg1 0x12(%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x80, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 c9 80 78 56 34 12 \tsha1msg1 0x12345678(%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x85, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 c9 85 78 56 34 12 \tsha1msg1 0x12345678(%ebp),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x84, 0x01, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c9 84 01 78 56 34 12 \tsha1msg1 0x12345678(%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x84, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c9 84 05 78 56 34 12 \tsha1msg1 0x12345678(%ebp,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c9 84 08 78 56 34 12 \tsha1msg1 0x12345678(%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x84, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c9 84 c8 78 56 34 12 \tsha1msg1 0x12345678(%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0xc1, }, 4, 0, "", "",
>> +"0f 38 ca c1 \tsha1msg2 %xmm1,%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0xd7, }, 4, 0, "", "",
>> +"0f 38 ca d7 \tsha1msg2 %xmm7,%xmm2",},
>> +{{0x0f, 0x38, 0xca, 0x00, }, 4, 0, "", "",
>> +"0f 38 ca 00 \tsha1msg2 (%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x05, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 ca 05 78 56 34 12 \tsha1msg2 0x12345678,%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x18, }, 4, 0, "", "",
>> +"0f 38 ca 18 \tsha1msg2 (%eax),%xmm3",},
>> +{{0x0f, 0x38, 0xca, 0x04, 0x01, }, 5, 0, "", "",
>> +"0f 38 ca 04 01 \tsha1msg2 (%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x04, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 ca 04 05 78 56 34 12 \tsha1msg2 0x12345678(,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x04, 0x08, }, 5, 0, "", "",
>> +"0f 38 ca 04 08 \tsha1msg2 (%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x04, 0xc8, }, 5, 0, "", "",
>> +"0f 38 ca 04 c8 \tsha1msg2 (%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x40, 0x12, }, 5, 0, "", "",
>> +"0f 38 ca 40 12 \tsha1msg2 0x12(%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x45, 0x12, }, 5, 0, "", "",
>> +"0f 38 ca 45 12 \tsha1msg2 0x12(%ebp),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x44, 0x01, 0x12, }, 6, 0, "", "",
>> +"0f 38 ca 44 01 12 \tsha1msg2 0x12(%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x44, 0x05, 0x12, }, 6, 0, "", "",
>> +"0f 38 ca 44 05 12 \tsha1msg2 0x12(%ebp,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x44, 0x08, 0x12, }, 6, 0, "", "",
>> +"0f 38 ca 44 08 12 \tsha1msg2 0x12(%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x44, 0xc8, 0x12, }, 6, 0, "", "",
>> +"0f 38 ca 44 c8 12 \tsha1msg2 0x12(%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x80, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 ca 80 78 56 34 12 \tsha1msg2 0x12345678(%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x85, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 ca 85 78 56 34 12 \tsha1msg2 0x12345678(%ebp),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x84, 0x01, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 ca 84 01 78 56 34 12 \tsha1msg2 0x12345678(%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x84, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 ca 84 05 78 56 34 12 \tsha1msg2 0x12345678(%ebp,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 ca 84 08 78 56 34 12 \tsha1msg2 0x12345678(%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x84, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 ca 84 c8 78 56 34 12 \tsha1msg2 0x12345678(%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xcb, 0xcc, }, 4, 0, "", "",
>> +"0f 38 cb cc \tsha256rnds2 %xmm0,%xmm4,%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0xd7, }, 4, 0, "", "",
>> +"0f 38 cb d7 \tsha256rnds2 %xmm0,%xmm7,%xmm2",},
>> +{{0x0f, 0x38, 0xcb, 0x08, }, 4, 0, "", "",
>> +"0f 38 cb 08 \tsha256rnds2 %xmm0,(%eax),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x0d, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cb 0d 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678,%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x18, }, 4, 0, "", "",
>> +"0f 38 cb 18 \tsha256rnds2 %xmm0,(%eax),%xmm3",},
>> +{{0x0f, 0x38, 0xcb, 0x0c, 0x01, }, 5, 0, "", "",
>> +"0f 38 cb 0c 01 \tsha256rnds2 %xmm0,(%ecx,%eax,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x0c, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cb 0c 05 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(,%eax,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x0c, 0x08, }, 5, 0, "", "",
>> +"0f 38 cb 0c 08 \tsha256rnds2 %xmm0,(%eax,%ecx,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x0c, 0xc8, }, 5, 0, "", "",
>> +"0f 38 cb 0c c8 \tsha256rnds2 %xmm0,(%eax,%ecx,8),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x48, 0x12, }, 5, 0, "", "",
>> +"0f 38 cb 48 12 \tsha256rnds2 %xmm0,0x12(%eax),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x4d, 0x12, }, 5, 0, "", "",
>> +"0f 38 cb 4d 12 \tsha256rnds2 %xmm0,0x12(%ebp),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x4c, 0x01, 0x12, }, 6, 0, "", "",
>> +"0f 38 cb 4c 01 12 \tsha256rnds2 %xmm0,0x12(%ecx,%eax,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x4c, 0x05, 0x12, }, 6, 0, "", "",
>> +"0f 38 cb 4c 05 12 \tsha256rnds2 %xmm0,0x12(%ebp,%eax,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x4c, 0x08, 0x12, }, 6, 0, "", "",
>> +"0f 38 cb 4c 08 12 \tsha256rnds2 %xmm0,0x12(%eax,%ecx,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x4c, 0xc8, 0x12, }, 6, 0, "", "",
>> +"0f 38 cb 4c c8 12 \tsha256rnds2 %xmm0,0x12(%eax,%ecx,8),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cb 88 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(%eax),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x8d, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cb 8d 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(%ebp),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x8c, 0x01, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cb 8c 01 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(%ecx,%eax,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x8c, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cb 8c 05 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(%ebp,%eax,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x8c, 0x08, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cb 8c 08 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(%eax,%ecx,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x8c, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cb 8c c8 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(%eax,%ecx,8),%xmm1",},
>> +{{0x0f, 0x38, 0xcc, 0xc1, }, 4, 0, "", "",
>> +"0f 38 cc c1 \tsha256msg1 %xmm1,%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0xd7, }, 4, 0, "", "",
>> +"0f 38 cc d7 \tsha256msg1 %xmm7,%xmm2",},
>> +{{0x0f, 0x38, 0xcc, 0x00, }, 4, 0, "", "",
>> +"0f 38 cc 00 \tsha256msg1 (%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x05, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cc 05 78 56 34 12 \tsha256msg1 0x12345678,%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x18, }, 4, 0, "", "",
>> +"0f 38 cc 18 \tsha256msg1 (%eax),%xmm3",},
>> +{{0x0f, 0x38, 0xcc, 0x04, 0x01, }, 5, 0, "", "",
>> +"0f 38 cc 04 01 \tsha256msg1 (%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x04, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cc 04 05 78 56 34 12 \tsha256msg1 0x12345678(,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x04, 0x08, }, 5, 0, "", "",
>> +"0f 38 cc 04 08 \tsha256msg1 (%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x04, 0xc8, }, 5, 0, "", "",
>> +"0f 38 cc 04 c8 \tsha256msg1 (%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x40, 0x12, }, 5, 0, "", "",
>> +"0f 38 cc 40 12 \tsha256msg1 0x12(%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x45, 0x12, }, 5, 0, "", "",
>> +"0f 38 cc 45 12 \tsha256msg1 0x12(%ebp),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x44, 0x01, 0x12, }, 6, 0, "", "",
>> +"0f 38 cc 44 01 12 \tsha256msg1 0x12(%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x44, 0x05, 0x12, }, 6, 0, "", "",
>> +"0f 38 cc 44 05 12 \tsha256msg1 0x12(%ebp,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x44, 0x08, 0x12, }, 6, 0, "", "",
>> +"0f 38 cc 44 08 12 \tsha256msg1 0x12(%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x44, 0xc8, 0x12, }, 6, 0, "", "",
>> +"0f 38 cc 44 c8 12 \tsha256msg1 0x12(%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x80, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cc 80 78 56 34 12 \tsha256msg1 0x12345678(%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x85, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cc 85 78 56 34 12 \tsha256msg1 0x12345678(%ebp),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x84, 0x01, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cc 84 01 78 56 34 12 \tsha256msg1 0x12345678(%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x84, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cc 84 05 78 56 34 12 \tsha256msg1 0x12345678(%ebp,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cc 84 08 78 56 34 12 \tsha256msg1 0x12345678(%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x84, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cc 84 c8 78 56 34 12 \tsha256msg1 0x12345678(%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0xc1, }, 4, 0, "", "",
>> +"0f 38 cd c1 \tsha256msg2 %xmm1,%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0xd7, }, 4, 0, "", "",
>> +"0f 38 cd d7 \tsha256msg2 %xmm7,%xmm2",},
>> +{{0x0f, 0x38, 0xcd, 0x00, }, 4, 0, "", "",
>> +"0f 38 cd 00 \tsha256msg2 (%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x05, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cd 05 78 56 34 12 \tsha256msg2 0x12345678,%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x18, }, 4, 0, "", "",
>> +"0f 38 cd 18 \tsha256msg2 (%eax),%xmm3",},
>> +{{0x0f, 0x38, 0xcd, 0x04, 0x01, }, 5, 0, "", "",
>> +"0f 38 cd 04 01 \tsha256msg2 (%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x04, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cd 04 05 78 56 34 12 \tsha256msg2 0x12345678(,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x04, 0x08, }, 5, 0, "", "",
>> +"0f 38 cd 04 08 \tsha256msg2 (%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x04, 0xc8, }, 5, 0, "", "",
>> +"0f 38 cd 04 c8 \tsha256msg2 (%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x40, 0x12, }, 5, 0, "", "",
>> +"0f 38 cd 40 12 \tsha256msg2 0x12(%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x45, 0x12, }, 5, 0, "", "",
>> +"0f 38 cd 45 12 \tsha256msg2 0x12(%ebp),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x44, 0x01, 0x12, }, 6, 0, "", "",
>> +"0f 38 cd 44 01 12 \tsha256msg2 0x12(%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x44, 0x05, 0x12, }, 6, 0, "", "",
>> +"0f 38 cd 44 05 12 \tsha256msg2 0x12(%ebp,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x44, 0x08, 0x12, }, 6, 0, "", "",
>> +"0f 38 cd 44 08 12 \tsha256msg2 0x12(%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x44, 0xc8, 0x12, }, 6, 0, "", "",
>> +"0f 38 cd 44 c8 12 \tsha256msg2 0x12(%eax,%ecx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x80, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cd 80 78 56 34 12 \tsha256msg2 0x12345678(%eax),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x85, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cd 85 78 56 34 12 \tsha256msg2 0x12345678(%ebp),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x84, 0x01, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cd 84 01 78 56 34 12 \tsha256msg2 0x12345678(%ecx,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x84, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cd 84 05 78 56 34 12 \tsha256msg2 0x12345678(%ebp,%eax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cd 84 08 78 56 34 12 \tsha256msg2 0x12345678(%eax,%ecx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x84, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cd 84 c8 78 56 34 12 \tsha256msg2 0x12345678(%eax,%ecx,8),%xmm0",},
>> diff --git a/tools/perf/tests/insn-x86-dat-64.c b/tools/perf/tests/insn-x86-dat-64.c
>> index 01122421a776..13f008588590 100644
>> --- a/tools/perf/tests/insn-x86-dat-64.c
>> +++ b/tools/perf/tests/insn-x86-dat-64.c
>> @@ -338,3 +338,367 @@
>> "67 f2 ff 21 \tbnd jmpq *(%ecx)",},
>> {{0xf2, 0x0f, 0x85, 0x00, 0x00, 0x00, 0x00, }, 7, 0, "jcc", "conditional",
>> "f2 0f 85 00 00 00 00 \tbnd jne 413 <main+0x413>",},
>> +{{0x0f, 0x3a, 0xcc, 0xc1, 0x00, }, 5, 0, "", "",
>> +"0f 3a cc c1 00 \tsha1rnds4 $0x0,%xmm1,%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0xd7, 0x91, }, 5, 0, "", "",
>> +"0f 3a cc d7 91 \tsha1rnds4 $0x91,%xmm7,%xmm2",},
>> +{{0x41, 0x0f, 0x3a, 0xcc, 0xc0, 0x91, }, 6, 0, "", "",
>> +"41 0f 3a cc c0 91 \tsha1rnds4 $0x91,%xmm8,%xmm0",},
>> +{{0x44, 0x0f, 0x3a, 0xcc, 0xc7, 0x91, }, 6, 0, "", "",
>> +"44 0f 3a cc c7 91 \tsha1rnds4 $0x91,%xmm7,%xmm8",},
>> +{{0x45, 0x0f, 0x3a, 0xcc, 0xc7, 0x91, }, 6, 0, "", "",
>> +"45 0f 3a cc c7 91 \tsha1rnds4 $0x91,%xmm15,%xmm8",},
>> +{{0x0f, 0x3a, 0xcc, 0x00, 0x91, }, 5, 0, "", "",
>> +"0f 3a cc 00 91 \tsha1rnds4 $0x91,(%rax),%xmm0",},
>> +{{0x41, 0x0f, 0x3a, 0xcc, 0x00, 0x91, }, 6, 0, "", "",
>> +"41 0f 3a cc 00 91 \tsha1rnds4 $0x91,(%r8),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x04, 0x25, 0x78, 0x56, 0x34, 0x12, 0x91, }, 10, 0, "", "",
>> +"0f 3a cc 04 25 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678,%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x18, 0x91, }, 5, 0, "", "",
>> +"0f 3a cc 18 91 \tsha1rnds4 $0x91,(%rax),%xmm3",},
>> +{{0x0f, 0x3a, 0xcc, 0x04, 0x01, 0x91, }, 6, 0, "", "",
>> +"0f 3a cc 04 01 91 \tsha1rnds4 $0x91,(%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x04, 0x05, 0x78, 0x56, 0x34, 0x12, 0x91, }, 10, 0, "", "",
>> +"0f 3a cc 04 05 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(,%rax,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x04, 0x08, 0x91, }, 6, 0, "", "",
>> +"0f 3a cc 04 08 91 \tsha1rnds4 $0x91,(%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x04, 0xc8, 0x91, }, 6, 0, "", "",
>> +"0f 3a cc 04 c8 91 \tsha1rnds4 $0x91,(%rax,%rcx,8),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x40, 0x12, 0x91, }, 6, 0, "", "",
>> +"0f 3a cc 40 12 91 \tsha1rnds4 $0x91,0x12(%rax),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x45, 0x12, 0x91, }, 6, 0, "", "",
>> +"0f 3a cc 45 12 91 \tsha1rnds4 $0x91,0x12(%rbp),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x44, 0x01, 0x12, 0x91, }, 7, 0, "", "",
>> +"0f 3a cc 44 01 12 91 \tsha1rnds4 $0x91,0x12(%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x44, 0x05, 0x12, 0x91, }, 7, 0, "", "",
>> +"0f 3a cc 44 05 12 91 \tsha1rnds4 $0x91,0x12(%rbp,%rax,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x44, 0x08, 0x12, 0x91, }, 7, 0, "", "",
>> +"0f 3a cc 44 08 12 91 \tsha1rnds4 $0x91,0x12(%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x44, 0xc8, 0x12, 0x91, }, 7, 0, "", "",
>> +"0f 3a cc 44 c8 12 91 \tsha1rnds4 $0x91,0x12(%rax,%rcx,8),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x80, 0x78, 0x56, 0x34, 0x12, 0x91, }, 9, 0, "", "",
>> +"0f 3a cc 80 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(%rax),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x85, 0x78, 0x56, 0x34, 0x12, 0x91, }, 9, 0, "", "",
>> +"0f 3a cc 85 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(%rbp),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x84, 0x01, 0x78, 0x56, 0x34, 0x12, 0x91, }, 10, 0, "", "",
>> +"0f 3a cc 84 01 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x84, 0x05, 0x78, 0x56, 0x34, 0x12, 0x91, }, 10, 0, "", "",
>> +"0f 3a cc 84 05 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(%rbp,%rax,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, 0x91, }, 10, 0, "", "",
>> +"0f 3a cc 84 08 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x3a, 0xcc, 0x84, 0xc8, 0x78, 0x56, 0x34, 0x12, 0x91, }, 10, 0, "", "",
>> +"0f 3a cc 84 c8 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(%rax,%rcx,8),%xmm0",},
>> +{{0x44, 0x0f, 0x3a, 0xcc, 0xbc, 0xc8, 0x78, 0x56, 0x34, 0x12, 0x91, }, 11, 0, "", "",
>> +"44 0f 3a cc bc c8 78 56 34 12 91 \tsha1rnds4 $0x91,0x12345678(%rax,%rcx,8),%xmm15",},
>> +{{0x0f, 0x38, 0xc8, 0xc1, }, 4, 0, "", "",
>> +"0f 38 c8 c1 \tsha1nexte %xmm1,%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0xd7, }, 4, 0, "", "",
>> +"0f 38 c8 d7 \tsha1nexte %xmm7,%xmm2",},
>> +{{0x41, 0x0f, 0x38, 0xc8, 0xc0, }, 5, 0, "", "",
>> +"41 0f 38 c8 c0 \tsha1nexte %xmm8,%xmm0",},
>> +{{0x44, 0x0f, 0x38, 0xc8, 0xc7, }, 5, 0, "", "",
>> +"44 0f 38 c8 c7 \tsha1nexte %xmm7,%xmm8",},
>> +{{0x45, 0x0f, 0x38, 0xc8, 0xc7, }, 5, 0, "", "",
>> +"45 0f 38 c8 c7 \tsha1nexte %xmm15,%xmm8",},
>> +{{0x0f, 0x38, 0xc8, 0x00, }, 4, 0, "", "",
>> +"0f 38 c8 00 \tsha1nexte (%rax),%xmm0",},
>> +{{0x41, 0x0f, 0x38, 0xc8, 0x00, }, 5, 0, "", "",
>> +"41 0f 38 c8 00 \tsha1nexte (%r8),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x04, 0x25, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c8 04 25 78 56 34 12 \tsha1nexte 0x12345678,%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x18, }, 4, 0, "", "",
>> +"0f 38 c8 18 \tsha1nexte (%rax),%xmm3",},
>> +{{0x0f, 0x38, 0xc8, 0x04, 0x01, }, 5, 0, "", "",
>> +"0f 38 c8 04 01 \tsha1nexte (%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x04, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c8 04 05 78 56 34 12 \tsha1nexte 0x12345678(,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x04, 0x08, }, 5, 0, "", "",
>> +"0f 38 c8 04 08 \tsha1nexte (%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x04, 0xc8, }, 5, 0, "", "",
>> +"0f 38 c8 04 c8 \tsha1nexte (%rax,%rcx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x40, 0x12, }, 5, 0, "", "",
>> +"0f 38 c8 40 12 \tsha1nexte 0x12(%rax),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x45, 0x12, }, 5, 0, "", "",
>> +"0f 38 c8 45 12 \tsha1nexte 0x12(%rbp),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x44, 0x01, 0x12, }, 6, 0, "", "",
>> +"0f 38 c8 44 01 12 \tsha1nexte 0x12(%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x44, 0x05, 0x12, }, 6, 0, "", "",
>> +"0f 38 c8 44 05 12 \tsha1nexte 0x12(%rbp,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x44, 0x08, 0x12, }, 6, 0, "", "",
>> +"0f 38 c8 44 08 12 \tsha1nexte 0x12(%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x44, 0xc8, 0x12, }, 6, 0, "", "",
>> +"0f 38 c8 44 c8 12 \tsha1nexte 0x12(%rax,%rcx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x80, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 c8 80 78 56 34 12 \tsha1nexte 0x12345678(%rax),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x85, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 c8 85 78 56 34 12 \tsha1nexte 0x12345678(%rbp),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x84, 0x01, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c8 84 01 78 56 34 12 \tsha1nexte 0x12345678(%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x84, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c8 84 05 78 56 34 12 \tsha1nexte 0x12345678(%rbp,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c8 84 08 78 56 34 12 \tsha1nexte 0x12345678(%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc8, 0x84, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c8 84 c8 78 56 34 12 \tsha1nexte 0x12345678(%rax,%rcx,8),%xmm0",},
>> +{{0x44, 0x0f, 0x38, 0xc8, 0xbc, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 10, 0, "", "",
>> +"44 0f 38 c8 bc c8 78 56 34 12 \tsha1nexte 0x12345678(%rax,%rcx,8),%xmm15",},
>> +{{0x0f, 0x38, 0xc9, 0xc1, }, 4, 0, "", "",
>> +"0f 38 c9 c1 \tsha1msg1 %xmm1,%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0xd7, }, 4, 0, "", "",
>> +"0f 38 c9 d7 \tsha1msg1 %xmm7,%xmm2",},
>> +{{0x41, 0x0f, 0x38, 0xc9, 0xc0, }, 5, 0, "", "",
>> +"41 0f 38 c9 c0 \tsha1msg1 %xmm8,%xmm0",},
>> +{{0x44, 0x0f, 0x38, 0xc9, 0xc7, }, 5, 0, "", "",
>> +"44 0f 38 c9 c7 \tsha1msg1 %xmm7,%xmm8",},
>> +{{0x45, 0x0f, 0x38, 0xc9, 0xc7, }, 5, 0, "", "",
>> +"45 0f 38 c9 c7 \tsha1msg1 %xmm15,%xmm8",},
>> +{{0x0f, 0x38, 0xc9, 0x00, }, 4, 0, "", "",
>> +"0f 38 c9 00 \tsha1msg1 (%rax),%xmm0",},
>> +{{0x41, 0x0f, 0x38, 0xc9, 0x00, }, 5, 0, "", "",
>> +"41 0f 38 c9 00 \tsha1msg1 (%r8),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x04, 0x25, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c9 04 25 78 56 34 12 \tsha1msg1 0x12345678,%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x18, }, 4, 0, "", "",
>> +"0f 38 c9 18 \tsha1msg1 (%rax),%xmm3",},
>> +{{0x0f, 0x38, 0xc9, 0x04, 0x01, }, 5, 0, "", "",
>> +"0f 38 c9 04 01 \tsha1msg1 (%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x04, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c9 04 05 78 56 34 12 \tsha1msg1 0x12345678(,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x04, 0x08, }, 5, 0, "", "",
>> +"0f 38 c9 04 08 \tsha1msg1 (%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x04, 0xc8, }, 5, 0, "", "",
>> +"0f 38 c9 04 c8 \tsha1msg1 (%rax,%rcx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x40, 0x12, }, 5, 0, "", "",
>> +"0f 38 c9 40 12 \tsha1msg1 0x12(%rax),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x45, 0x12, }, 5, 0, "", "",
>> +"0f 38 c9 45 12 \tsha1msg1 0x12(%rbp),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x44, 0x01, 0x12, }, 6, 0, "", "",
>> +"0f 38 c9 44 01 12 \tsha1msg1 0x12(%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x44, 0x05, 0x12, }, 6, 0, "", "",
>> +"0f 38 c9 44 05 12 \tsha1msg1 0x12(%rbp,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x44, 0x08, 0x12, }, 6, 0, "", "",
>> +"0f 38 c9 44 08 12 \tsha1msg1 0x12(%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x44, 0xc8, 0x12, }, 6, 0, "", "",
>> +"0f 38 c9 44 c8 12 \tsha1msg1 0x12(%rax,%rcx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x80, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 c9 80 78 56 34 12 \tsha1msg1 0x12345678(%rax),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x85, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 c9 85 78 56 34 12 \tsha1msg1 0x12345678(%rbp),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x84, 0x01, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c9 84 01 78 56 34 12 \tsha1msg1 0x12345678(%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x84, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c9 84 05 78 56 34 12 \tsha1msg1 0x12345678(%rbp,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c9 84 08 78 56 34 12 \tsha1msg1 0x12345678(%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xc9, 0x84, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 c9 84 c8 78 56 34 12 \tsha1msg1 0x12345678(%rax,%rcx,8),%xmm0",},
>> +{{0x44, 0x0f, 0x38, 0xc9, 0xbc, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 10, 0, "", "",
>> +"44 0f 38 c9 bc c8 78 56 34 12 \tsha1msg1 0x12345678(%rax,%rcx,8),%xmm15",},
>> +{{0x0f, 0x38, 0xca, 0xc1, }, 4, 0, "", "",
>> +"0f 38 ca c1 \tsha1msg2 %xmm1,%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0xd7, }, 4, 0, "", "",
>> +"0f 38 ca d7 \tsha1msg2 %xmm7,%xmm2",},
>> +{{0x41, 0x0f, 0x38, 0xca, 0xc0, }, 5, 0, "", "",
>> +"41 0f 38 ca c0 \tsha1msg2 %xmm8,%xmm0",},
>> +{{0x44, 0x0f, 0x38, 0xca, 0xc7, }, 5, 0, "", "",
>> +"44 0f 38 ca c7 \tsha1msg2 %xmm7,%xmm8",},
>> +{{0x45, 0x0f, 0x38, 0xca, 0xc7, }, 5, 0, "", "",
>> +"45 0f 38 ca c7 \tsha1msg2 %xmm15,%xmm8",},
>> +{{0x0f, 0x38, 0xca, 0x00, }, 4, 0, "", "",
>> +"0f 38 ca 00 \tsha1msg2 (%rax),%xmm0",},
>> +{{0x41, 0x0f, 0x38, 0xca, 0x00, }, 5, 0, "", "",
>> +"41 0f 38 ca 00 \tsha1msg2 (%r8),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x04, 0x25, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 ca 04 25 78 56 34 12 \tsha1msg2 0x12345678,%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x18, }, 4, 0, "", "",
>> +"0f 38 ca 18 \tsha1msg2 (%rax),%xmm3",},
>> +{{0x0f, 0x38, 0xca, 0x04, 0x01, }, 5, 0, "", "",
>> +"0f 38 ca 04 01 \tsha1msg2 (%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x04, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 ca 04 05 78 56 34 12 \tsha1msg2 0x12345678(,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x04, 0x08, }, 5, 0, "", "",
>> +"0f 38 ca 04 08 \tsha1msg2 (%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x04, 0xc8, }, 5, 0, "", "",
>> +"0f 38 ca 04 c8 \tsha1msg2 (%rax,%rcx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x40, 0x12, }, 5, 0, "", "",
>> +"0f 38 ca 40 12 \tsha1msg2 0x12(%rax),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x45, 0x12, }, 5, 0, "", "",
>> +"0f 38 ca 45 12 \tsha1msg2 0x12(%rbp),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x44, 0x01, 0x12, }, 6, 0, "", "",
>> +"0f 38 ca 44 01 12 \tsha1msg2 0x12(%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x44, 0x05, 0x12, }, 6, 0, "", "",
>> +"0f 38 ca 44 05 12 \tsha1msg2 0x12(%rbp,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x44, 0x08, 0x12, }, 6, 0, "", "",
>> +"0f 38 ca 44 08 12 \tsha1msg2 0x12(%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x44, 0xc8, 0x12, }, 6, 0, "", "",
>> +"0f 38 ca 44 c8 12 \tsha1msg2 0x12(%rax,%rcx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x80, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 ca 80 78 56 34 12 \tsha1msg2 0x12345678(%rax),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x85, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 ca 85 78 56 34 12 \tsha1msg2 0x12345678(%rbp),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x84, 0x01, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 ca 84 01 78 56 34 12 \tsha1msg2 0x12345678(%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x84, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 ca 84 05 78 56 34 12 \tsha1msg2 0x12345678(%rbp,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 ca 84 08 78 56 34 12 \tsha1msg2 0x12345678(%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xca, 0x84, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 ca 84 c8 78 56 34 12 \tsha1msg2 0x12345678(%rax,%rcx,8),%xmm0",},
>> +{{0x44, 0x0f, 0x38, 0xca, 0xbc, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 10, 0, "", "",
>> +"44 0f 38 ca bc c8 78 56 34 12 \tsha1msg2 0x12345678(%rax,%rcx,8),%xmm15",},
>> +{{0x0f, 0x38, 0xcb, 0xcc, }, 4, 0, "", "",
>> +"0f 38 cb cc \tsha256rnds2 %xmm0,%xmm4,%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0xd7, }, 4, 0, "", "",
>> +"0f 38 cb d7 \tsha256rnds2 %xmm0,%xmm7,%xmm2",},
>> +{{0x41, 0x0f, 0x38, 0xcb, 0xc8, }, 5, 0, "", "",
>> +"41 0f 38 cb c8 \tsha256rnds2 %xmm0,%xmm8,%xmm1",},
>> +{{0x44, 0x0f, 0x38, 0xcb, 0xc7, }, 5, 0, "", "",
>> +"44 0f 38 cb c7 \tsha256rnds2 %xmm0,%xmm7,%xmm8",},
>> +{{0x45, 0x0f, 0x38, 0xcb, 0xc7, }, 5, 0, "", "",
>> +"45 0f 38 cb c7 \tsha256rnds2 %xmm0,%xmm15,%xmm8",},
>> +{{0x0f, 0x38, 0xcb, 0x08, }, 4, 0, "", "",
>> +"0f 38 cb 08 \tsha256rnds2 %xmm0,(%rax),%xmm1",},
>> +{{0x41, 0x0f, 0x38, 0xcb, 0x08, }, 5, 0, "", "",
>> +"41 0f 38 cb 08 \tsha256rnds2 %xmm0,(%r8),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x0c, 0x25, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cb 0c 25 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678,%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x18, }, 4, 0, "", "",
>> +"0f 38 cb 18 \tsha256rnds2 %xmm0,(%rax),%xmm3",},
>> +{{0x0f, 0x38, 0xcb, 0x0c, 0x01, }, 5, 0, "", "",
>> +"0f 38 cb 0c 01 \tsha256rnds2 %xmm0,(%rcx,%rax,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x0c, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cb 0c 05 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(,%rax,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x0c, 0x08, }, 5, 0, "", "",
>> +"0f 38 cb 0c 08 \tsha256rnds2 %xmm0,(%rax,%rcx,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x0c, 0xc8, }, 5, 0, "", "",
>> +"0f 38 cb 0c c8 \tsha256rnds2 %xmm0,(%rax,%rcx,8),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x48, 0x12, }, 5, 0, "", "",
>> +"0f 38 cb 48 12 \tsha256rnds2 %xmm0,0x12(%rax),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x4d, 0x12, }, 5, 0, "", "",
>> +"0f 38 cb 4d 12 \tsha256rnds2 %xmm0,0x12(%rbp),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x4c, 0x01, 0x12, }, 6, 0, "", "",
>> +"0f 38 cb 4c 01 12 \tsha256rnds2 %xmm0,0x12(%rcx,%rax,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x4c, 0x05, 0x12, }, 6, 0, "", "",
>> +"0f 38 cb 4c 05 12 \tsha256rnds2 %xmm0,0x12(%rbp,%rax,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x4c, 0x08, 0x12, }, 6, 0, "", "",
>> +"0f 38 cb 4c 08 12 \tsha256rnds2 %xmm0,0x12(%rax,%rcx,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x4c, 0xc8, 0x12, }, 6, 0, "", "",
>> +"0f 38 cb 4c c8 12 \tsha256rnds2 %xmm0,0x12(%rax,%rcx,8),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x88, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cb 88 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(%rax),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x8d, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cb 8d 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(%rbp),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x8c, 0x01, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cb 8c 01 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(%rcx,%rax,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x8c, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cb 8c 05 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(%rbp,%rax,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x8c, 0x08, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cb 8c 08 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(%rax,%rcx,1),%xmm1",},
>> +{{0x0f, 0x38, 0xcb, 0x8c, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cb 8c c8 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(%rax,%rcx,8),%xmm1",},
>> +{{0x44, 0x0f, 0x38, 0xcb, 0xbc, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 10, 0, "", "",
>> +"44 0f 38 cb bc c8 78 56 34 12 \tsha256rnds2 %xmm0,0x12345678(%rax,%rcx,8),%xmm15",},
>> +{{0x0f, 0x38, 0xcc, 0xc1, }, 4, 0, "", "",
>> +"0f 38 cc c1 \tsha256msg1 %xmm1,%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0xd7, }, 4, 0, "", "",
>> +"0f 38 cc d7 \tsha256msg1 %xmm7,%xmm2",},
>> +{{0x41, 0x0f, 0x38, 0xcc, 0xc0, }, 5, 0, "", "",
>> +"41 0f 38 cc c0 \tsha256msg1 %xmm8,%xmm0",},
>> +{{0x44, 0x0f, 0x38, 0xcc, 0xc7, }, 5, 0, "", "",
>> +"44 0f 38 cc c7 \tsha256msg1 %xmm7,%xmm8",},
>> +{{0x45, 0x0f, 0x38, 0xcc, 0xc7, }, 5, 0, "", "",
>> +"45 0f 38 cc c7 \tsha256msg1 %xmm15,%xmm8",},
>> +{{0x0f, 0x38, 0xcc, 0x00, }, 4, 0, "", "",
>> +"0f 38 cc 00 \tsha256msg1 (%rax),%xmm0",},
>> +{{0x41, 0x0f, 0x38, 0xcc, 0x00, }, 5, 0, "", "",
>> +"41 0f 38 cc 00 \tsha256msg1 (%r8),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x04, 0x25, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cc 04 25 78 56 34 12 \tsha256msg1 0x12345678,%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x18, }, 4, 0, "", "",
>> +"0f 38 cc 18 \tsha256msg1 (%rax),%xmm3",},
>> +{{0x0f, 0x38, 0xcc, 0x04, 0x01, }, 5, 0, "", "",
>> +"0f 38 cc 04 01 \tsha256msg1 (%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x04, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cc 04 05 78 56 34 12 \tsha256msg1 0x12345678(,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x04, 0x08, }, 5, 0, "", "",
>> +"0f 38 cc 04 08 \tsha256msg1 (%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x04, 0xc8, }, 5, 0, "", "",
>> +"0f 38 cc 04 c8 \tsha256msg1 (%rax,%rcx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x40, 0x12, }, 5, 0, "", "",
>> +"0f 38 cc 40 12 \tsha256msg1 0x12(%rax),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x45, 0x12, }, 5, 0, "", "",
>> +"0f 38 cc 45 12 \tsha256msg1 0x12(%rbp),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x44, 0x01, 0x12, }, 6, 0, "", "",
>> +"0f 38 cc 44 01 12 \tsha256msg1 0x12(%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x44, 0x05, 0x12, }, 6, 0, "", "",
>> +"0f 38 cc 44 05 12 \tsha256msg1 0x12(%rbp,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x44, 0x08, 0x12, }, 6, 0, "", "",
>> +"0f 38 cc 44 08 12 \tsha256msg1 0x12(%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x44, 0xc8, 0x12, }, 6, 0, "", "",
>> +"0f 38 cc 44 c8 12 \tsha256msg1 0x12(%rax,%rcx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x80, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cc 80 78 56 34 12 \tsha256msg1 0x12345678(%rax),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x85, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cc 85 78 56 34 12 \tsha256msg1 0x12345678(%rbp),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x84, 0x01, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cc 84 01 78 56 34 12 \tsha256msg1 0x12345678(%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x84, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cc 84 05 78 56 34 12 \tsha256msg1 0x12345678(%rbp,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cc 84 08 78 56 34 12 \tsha256msg1 0x12345678(%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcc, 0x84, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cc 84 c8 78 56 34 12 \tsha256msg1 0x12345678(%rax,%rcx,8),%xmm0",},
>> +{{0x44, 0x0f, 0x38, 0xcc, 0xbc, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 10, 0, "", "",
>> +"44 0f 38 cc bc c8 78 56 34 12 \tsha256msg1 0x12345678(%rax,%rcx,8),%xmm15",},
>> +{{0x0f, 0x38, 0xcd, 0xc1, }, 4, 0, "", "",
>> +"0f 38 cd c1 \tsha256msg2 %xmm1,%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0xd7, }, 4, 0, "", "",
>> +"0f 38 cd d7 \tsha256msg2 %xmm7,%xmm2",},
>> +{{0x41, 0x0f, 0x38, 0xcd, 0xc0, }, 5, 0, "", "",
>> +"41 0f 38 cd c0 \tsha256msg2 %xmm8,%xmm0",},
>> +{{0x44, 0x0f, 0x38, 0xcd, 0xc7, }, 5, 0, "", "",
>> +"44 0f 38 cd c7 \tsha256msg2 %xmm7,%xmm8",},
>> +{{0x45, 0x0f, 0x38, 0xcd, 0xc7, }, 5, 0, "", "",
>> +"45 0f 38 cd c7 \tsha256msg2 %xmm15,%xmm8",},
>> +{{0x0f, 0x38, 0xcd, 0x00, }, 4, 0, "", "",
>> +"0f 38 cd 00 \tsha256msg2 (%rax),%xmm0",},
>> +{{0x41, 0x0f, 0x38, 0xcd, 0x00, }, 5, 0, "", "",
>> +"41 0f 38 cd 00 \tsha256msg2 (%r8),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x04, 0x25, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cd 04 25 78 56 34 12 \tsha256msg2 0x12345678,%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x18, }, 4, 0, "", "",
>> +"0f 38 cd 18 \tsha256msg2 (%rax),%xmm3",},
>> +{{0x0f, 0x38, 0xcd, 0x04, 0x01, }, 5, 0, "", "",
>> +"0f 38 cd 04 01 \tsha256msg2 (%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x04, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cd 04 05 78 56 34 12 \tsha256msg2 0x12345678(,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x04, 0x08, }, 5, 0, "", "",
>> +"0f 38 cd 04 08 \tsha256msg2 (%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x04, 0xc8, }, 5, 0, "", "",
>> +"0f 38 cd 04 c8 \tsha256msg2 (%rax,%rcx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x40, 0x12, }, 5, 0, "", "",
>> +"0f 38 cd 40 12 \tsha256msg2 0x12(%rax),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x45, 0x12, }, 5, 0, "", "",
>> +"0f 38 cd 45 12 \tsha256msg2 0x12(%rbp),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x44, 0x01, 0x12, }, 6, 0, "", "",
>> +"0f 38 cd 44 01 12 \tsha256msg2 0x12(%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x44, 0x05, 0x12, }, 6, 0, "", "",
>> +"0f 38 cd 44 05 12 \tsha256msg2 0x12(%rbp,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x44, 0x08, 0x12, }, 6, 0, "", "",
>> +"0f 38 cd 44 08 12 \tsha256msg2 0x12(%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x44, 0xc8, 0x12, }, 6, 0, "", "",
>> +"0f 38 cd 44 c8 12 \tsha256msg2 0x12(%rax,%rcx,8),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x80, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cd 80 78 56 34 12 \tsha256msg2 0x12345678(%rax),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x85, 0x78, 0x56, 0x34, 0x12, }, 8, 0, "", "",
>> +"0f 38 cd 85 78 56 34 12 \tsha256msg2 0x12345678(%rbp),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x84, 0x01, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cd 84 01 78 56 34 12 \tsha256msg2 0x12345678(%rcx,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x84, 0x05, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cd 84 05 78 56 34 12 \tsha256msg2 0x12345678(%rbp,%rax,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x84, 0x08, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cd 84 08 78 56 34 12 \tsha256msg2 0x12345678(%rax,%rcx,1),%xmm0",},
>> +{{0x0f, 0x38, 0xcd, 0x84, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 9, 0, "", "",
>> +"0f 38 cd 84 c8 78 56 34 12 \tsha256msg2 0x12345678(%rax,%rcx,8),%xmm0",},
>> +{{0x44, 0x0f, 0x38, 0xcd, 0xbc, 0xc8, 0x78, 0x56, 0x34, 0x12, }, 10, 0, "", "",
>> +"44 0f 38 cd bc c8 78 56 34 12 \tsha256msg2 0x12345678(%rax,%rcx,8),%xmm15",},
>> diff --git a/tools/perf/tests/insn-x86-dat-src.c b/tools/perf/tests/insn-x86-dat-src.c
>> index b506830f33a8..7d06c9b22070 100644
>> --- a/tools/perf/tests/insn-x86-dat-src.c
>> +++ b/tools/perf/tests/insn-x86-dat-src.c
>> @@ -217,6 +217,210 @@ int main(void)
>> asm volatile("bnd jmp *(%ecx)"); /* Expecting: jmp indirect 0 */
>> asm volatile("bnd jne label1"); /* Expecting: jcc conditional 0 */
>>
>> + /* sha1rnds4 imm8, xmm2/m128, xmm1 */
>> +
>> + asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
>> + asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
>> + asm volatile("sha1rnds4 $0x91, %xmm8, %xmm0");
>> + asm volatile("sha1rnds4 $0x91, %xmm7, %xmm8");
>> + asm volatile("sha1rnds4 $0x91, %xmm15, %xmm8");
>> + asm volatile("sha1rnds4 $0x91, (%rax), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, (%r8), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, (%rax), %xmm3");
>> + asm volatile("sha1rnds4 $0x91, (%rcx,%rax,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(,%rax,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, (%rax,%rcx,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, (%rax,%rcx,8), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12(%rax), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12(%rbp), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12(%rcx,%rax,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12(%rbp,%rax,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,8), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(%rax), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(%rcx,%rax,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp,%rax,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm15");
>> +
>> + /* sha1nexte xmm2/m128, xmm1 */
>> +
>> + asm volatile("sha1nexte %xmm1, %xmm0");
>> + asm volatile("sha1nexte %xmm7, %xmm2");
>> + asm volatile("sha1nexte %xmm8, %xmm0");
>> + asm volatile("sha1nexte %xmm7, %xmm8");
>> + asm volatile("sha1nexte %xmm15, %xmm8");
>> + asm volatile("sha1nexte (%rax), %xmm0");
>> + asm volatile("sha1nexte (%r8), %xmm0");
>> + asm volatile("sha1nexte (0x12345678), %xmm0");
>> + asm volatile("sha1nexte (%rax), %xmm3");
>> + asm volatile("sha1nexte (%rcx,%rax,1), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(,%rax,1), %xmm0");
>> + asm volatile("sha1nexte (%rax,%rcx,1), %xmm0");
>> + asm volatile("sha1nexte (%rax,%rcx,8), %xmm0");
>> + asm volatile("sha1nexte 0x12(%rax), %xmm0");
>> + asm volatile("sha1nexte 0x12(%rbp), %xmm0");
>> + asm volatile("sha1nexte 0x12(%rcx,%rax,1), %xmm0");
>> + asm volatile("sha1nexte 0x12(%rbp,%rax,1), %xmm0");
>> + asm volatile("sha1nexte 0x12(%rax,%rcx,1), %xmm0");
>> + asm volatile("sha1nexte 0x12(%rax,%rcx,8), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(%rax), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(%rbp), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(%rcx,%rax,1), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(%rbp,%rax,1), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(%rax,%rcx,1), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm15");
>> +
>> + /* sha1msg1 xmm2/m128, xmm1 */
>> +
>> + asm volatile("sha1msg1 %xmm1, %xmm0");
>> + asm volatile("sha1msg1 %xmm7, %xmm2");
>> + asm volatile("sha1msg1 %xmm8, %xmm0");
>> + asm volatile("sha1msg1 %xmm7, %xmm8");
>> + asm volatile("sha1msg1 %xmm15, %xmm8");
>> + asm volatile("sha1msg1 (%rax), %xmm0");
>> + asm volatile("sha1msg1 (%r8), %xmm0");
>> + asm volatile("sha1msg1 (0x12345678), %xmm0");
>> + asm volatile("sha1msg1 (%rax), %xmm3");
>> + asm volatile("sha1msg1 (%rcx,%rax,1), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(,%rax,1), %xmm0");
>> + asm volatile("sha1msg1 (%rax,%rcx,1), %xmm0");
>> + asm volatile("sha1msg1 (%rax,%rcx,8), %xmm0");
>> + asm volatile("sha1msg1 0x12(%rax), %xmm0");
>> + asm volatile("sha1msg1 0x12(%rbp), %xmm0");
>> + asm volatile("sha1msg1 0x12(%rcx,%rax,1), %xmm0");
>> + asm volatile("sha1msg1 0x12(%rbp,%rax,1), %xmm0");
>> + asm volatile("sha1msg1 0x12(%rax,%rcx,1), %xmm0");
>> + asm volatile("sha1msg1 0x12(%rax,%rcx,8), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(%rax), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(%rbp), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(%rcx,%rax,1), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(%rbp,%rax,1), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(%rax,%rcx,1), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm15");
>> +
>> + /* sha1msg2 xmm2/m128, xmm1 */
>> +
>> + asm volatile("sha1msg2 %xmm1, %xmm0");
>> + asm volatile("sha1msg2 %xmm7, %xmm2");
>> + asm volatile("sha1msg2 %xmm8, %xmm0");
>> + asm volatile("sha1msg2 %xmm7, %xmm8");
>> + asm volatile("sha1msg2 %xmm15, %xmm8");
>> + asm volatile("sha1msg2 (%rax), %xmm0");
>> + asm volatile("sha1msg2 (%r8), %xmm0");
>> + asm volatile("sha1msg2 (0x12345678), %xmm0");
>> + asm volatile("sha1msg2 (%rax), %xmm3");
>> + asm volatile("sha1msg2 (%rcx,%rax,1), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(,%rax,1), %xmm0");
>> + asm volatile("sha1msg2 (%rax,%rcx,1), %xmm0");
>> + asm volatile("sha1msg2 (%rax,%rcx,8), %xmm0");
>> + asm volatile("sha1msg2 0x12(%rax), %xmm0");
>> + asm volatile("sha1msg2 0x12(%rbp), %xmm0");
>> + asm volatile("sha1msg2 0x12(%rcx,%rax,1), %xmm0");
>> + asm volatile("sha1msg2 0x12(%rbp,%rax,1), %xmm0");
>> + asm volatile("sha1msg2 0x12(%rax,%rcx,1), %xmm0");
>> + asm volatile("sha1msg2 0x12(%rax,%rcx,8), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(%rax), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(%rbp), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(%rcx,%rax,1), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(%rbp,%rax,1), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(%rax,%rcx,1), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm15");
>> +
>> + /* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
>> + /* Note sha256rnds2 has an implicit operand 'xmm0' */
>> +
>> + asm volatile("sha256rnds2 %xmm4, %xmm1");
>> + asm volatile("sha256rnds2 %xmm7, %xmm2");
>> + asm volatile("sha256rnds2 %xmm8, %xmm1");
>> + asm volatile("sha256rnds2 %xmm7, %xmm8");
>> + asm volatile("sha256rnds2 %xmm15, %xmm8");
>> + asm volatile("sha256rnds2 (%rax), %xmm1");
>> + asm volatile("sha256rnds2 (%r8), %xmm1");
>> + asm volatile("sha256rnds2 (0x12345678), %xmm1");
>> + asm volatile("sha256rnds2 (%rax), %xmm3");
>> + asm volatile("sha256rnds2 (%rcx,%rax,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(,%rax,1), %xmm1");
>> + asm volatile("sha256rnds2 (%rax,%rcx,1), %xmm1");
>> + asm volatile("sha256rnds2 (%rax,%rcx,8), %xmm1");
>> + asm volatile("sha256rnds2 0x12(%rax), %xmm1");
>> + asm volatile("sha256rnds2 0x12(%rbp), %xmm1");
>> + asm volatile("sha256rnds2 0x12(%rcx,%rax,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12(%rbp,%rax,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12(%rax,%rcx,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12(%rax,%rcx,8), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(%rax), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(%rbp), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(%rcx,%rax,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(%rbp,%rax,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(%rax,%rcx,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm15");
>> +
>> + /* sha256msg1 xmm2/m128, xmm1 */
>> +
>> + asm volatile("sha256msg1 %xmm1, %xmm0");
>> + asm volatile("sha256msg1 %xmm7, %xmm2");
>> + asm volatile("sha256msg1 %xmm8, %xmm0");
>> + asm volatile("sha256msg1 %xmm7, %xmm8");
>> + asm volatile("sha256msg1 %xmm15, %xmm8");
>> + asm volatile("sha256msg1 (%rax), %xmm0");
>> + asm volatile("sha256msg1 (%r8), %xmm0");
>> + asm volatile("sha256msg1 (0x12345678), %xmm0");
>> + asm volatile("sha256msg1 (%rax), %xmm3");
>> + asm volatile("sha256msg1 (%rcx,%rax,1), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(,%rax,1), %xmm0");
>> + asm volatile("sha256msg1 (%rax,%rcx,1), %xmm0");
>> + asm volatile("sha256msg1 (%rax,%rcx,8), %xmm0");
>> + asm volatile("sha256msg1 0x12(%rax), %xmm0");
>> + asm volatile("sha256msg1 0x12(%rbp), %xmm0");
>> + asm volatile("sha256msg1 0x12(%rcx,%rax,1), %xmm0");
>> + asm volatile("sha256msg1 0x12(%rbp,%rax,1), %xmm0");
>> + asm volatile("sha256msg1 0x12(%rax,%rcx,1), %xmm0");
>> + asm volatile("sha256msg1 0x12(%rax,%rcx,8), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(%rax), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(%rbp), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(%rcx,%rax,1), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(%rbp,%rax,1), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(%rax,%rcx,1), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm15");
>> +
>> + /* sha256msg2 xmm2/m128, xmm1 */
>> +
>> + asm volatile("sha256msg2 %xmm1, %xmm0");
>> + asm volatile("sha256msg2 %xmm7, %xmm2");
>> + asm volatile("sha256msg2 %xmm8, %xmm0");
>> + asm volatile("sha256msg2 %xmm7, %xmm8");
>> + asm volatile("sha256msg2 %xmm15, %xmm8");
>> + asm volatile("sha256msg2 (%rax), %xmm0");
>> + asm volatile("sha256msg2 (%r8), %xmm0");
>> + asm volatile("sha256msg2 (0x12345678), %xmm0");
>> + asm volatile("sha256msg2 (%rax), %xmm3");
>> + asm volatile("sha256msg2 (%rcx,%rax,1), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(,%rax,1), %xmm0");
>> + asm volatile("sha256msg2 (%rax,%rcx,1), %xmm0");
>> + asm volatile("sha256msg2 (%rax,%rcx,8), %xmm0");
>> + asm volatile("sha256msg2 0x12(%rax), %xmm0");
>> + asm volatile("sha256msg2 0x12(%rbp), %xmm0");
>> + asm volatile("sha256msg2 0x12(%rcx,%rax,1), %xmm0");
>> + asm volatile("sha256msg2 0x12(%rbp,%rax,1), %xmm0");
>> + asm volatile("sha256msg2 0x12(%rax,%rcx,1), %xmm0");
>> + asm volatile("sha256msg2 0x12(%rax,%rcx,8), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(%rax), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(%rbp), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(%rcx,%rax,1), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(%rbp,%rax,1), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(%rax,%rcx,1), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm15");
>> +
>> #else /* #ifdef __x86_64__ */
>>
>> /* bndmk m32, bnd */
>> @@ -407,6 +611,175 @@ int main(void)
>> asm volatile("bnd jmp *(%ecx)"); /* Expecting: jmp indirect 0 */
>> asm volatile("bnd jne label1"); /* Expecting: jcc conditional 0xfffffffc */
>>
>> + /* sha1rnds4 imm8, xmm2/m128, xmm1 */
>> +
>> + asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
>> + asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
>> + asm volatile("sha1rnds4 $0x91, (%eax), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, (%eax), %xmm3");
>> + asm volatile("sha1rnds4 $0x91, (%ecx,%eax,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(,%eax,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, (%eax,%ecx,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, (%eax,%ecx,8), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12(%eax), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12(%ebp), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12(%ecx,%eax,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12(%ebp,%eax,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,8), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(%eax), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(%ecx,%eax,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp,%eax,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,1), %xmm0");
>> + asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,8), %xmm0");
>> +
>> + /* sha1nexte xmm2/m128, xmm1 */
>> +
>> + asm volatile("sha1nexte %xmm1, %xmm0");
>> + asm volatile("sha1nexte %xmm7, %xmm2");
>> + asm volatile("sha1nexte (%eax), %xmm0");
>> + asm volatile("sha1nexte (0x12345678), %xmm0");
>> + asm volatile("sha1nexte (%eax), %xmm3");
>> + asm volatile("sha1nexte (%ecx,%eax,1), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(,%eax,1), %xmm0");
>> + asm volatile("sha1nexte (%eax,%ecx,1), %xmm0");
>> + asm volatile("sha1nexte (%eax,%ecx,8), %xmm0");
>> + asm volatile("sha1nexte 0x12(%eax), %xmm0");
>> + asm volatile("sha1nexte 0x12(%ebp), %xmm0");
>> + asm volatile("sha1nexte 0x12(%ecx,%eax,1), %xmm0");
>> + asm volatile("sha1nexte 0x12(%ebp,%eax,1), %xmm0");
>> + asm volatile("sha1nexte 0x12(%eax,%ecx,1), %xmm0");
>> + asm volatile("sha1nexte 0x12(%eax,%ecx,8), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(%eax), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(%ebp), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(%ecx,%eax,1), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(%ebp,%eax,1), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(%eax,%ecx,1), %xmm0");
>> + asm volatile("sha1nexte 0x12345678(%eax,%ecx,8), %xmm0");
>> +
>> + /* sha1msg1 xmm2/m128, xmm1 */
>> +
>> + asm volatile("sha1msg1 %xmm1, %xmm0");
>> + asm volatile("sha1msg1 %xmm7, %xmm2");
>> + asm volatile("sha1msg1 (%eax), %xmm0");
>> + asm volatile("sha1msg1 (0x12345678), %xmm0");
>> + asm volatile("sha1msg1 (%eax), %xmm3");
>> + asm volatile("sha1msg1 (%ecx,%eax,1), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(,%eax,1), %xmm0");
>> + asm volatile("sha1msg1 (%eax,%ecx,1), %xmm0");
>> + asm volatile("sha1msg1 (%eax,%ecx,8), %xmm0");
>> + asm volatile("sha1msg1 0x12(%eax), %xmm0");
>> + asm volatile("sha1msg1 0x12(%ebp), %xmm0");
>> + asm volatile("sha1msg1 0x12(%ecx,%eax,1), %xmm0");
>> + asm volatile("sha1msg1 0x12(%ebp,%eax,1), %xmm0");
>> + asm volatile("sha1msg1 0x12(%eax,%ecx,1), %xmm0");
>> + asm volatile("sha1msg1 0x12(%eax,%ecx,8), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(%eax), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(%ebp), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(%ecx,%eax,1), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(%ebp,%eax,1), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(%eax,%ecx,1), %xmm0");
>> + asm volatile("sha1msg1 0x12345678(%eax,%ecx,8), %xmm0");
>> +
>> + /* sha1msg2 xmm2/m128, xmm1 */
>> +
>> + asm volatile("sha1msg2 %xmm1, %xmm0");
>> + asm volatile("sha1msg2 %xmm7, %xmm2");
>> + asm volatile("sha1msg2 (%eax), %xmm0");
>> + asm volatile("sha1msg2 (0x12345678), %xmm0");
>> + asm volatile("sha1msg2 (%eax), %xmm3");
>> + asm volatile("sha1msg2 (%ecx,%eax,1), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(,%eax,1), %xmm0");
>> + asm volatile("sha1msg2 (%eax,%ecx,1), %xmm0");
>> + asm volatile("sha1msg2 (%eax,%ecx,8), %xmm0");
>> + asm volatile("sha1msg2 0x12(%eax), %xmm0");
>> + asm volatile("sha1msg2 0x12(%ebp), %xmm0");
>> + asm volatile("sha1msg2 0x12(%ecx,%eax,1), %xmm0");
>> + asm volatile("sha1msg2 0x12(%ebp,%eax,1), %xmm0");
>> + asm volatile("sha1msg2 0x12(%eax,%ecx,1), %xmm0");
>> + asm volatile("sha1msg2 0x12(%eax,%ecx,8), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(%eax), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(%ebp), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(%ecx,%eax,1), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(%ebp,%eax,1), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(%eax,%ecx,1), %xmm0");
>> + asm volatile("sha1msg2 0x12345678(%eax,%ecx,8), %xmm0");
>> +
>> + /* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
>> + /* Note sha256rnds2 has an implicit operand 'xmm0' */
>> +
>> + asm volatile("sha256rnds2 %xmm4, %xmm1");
>> + asm volatile("sha256rnds2 %xmm7, %xmm2");
>> + asm volatile("sha256rnds2 (%eax), %xmm1");
>> + asm volatile("sha256rnds2 (0x12345678), %xmm1");
>> + asm volatile("sha256rnds2 (%eax), %xmm3");
>> + asm volatile("sha256rnds2 (%ecx,%eax,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(,%eax,1), %xmm1");
>> + asm volatile("sha256rnds2 (%eax,%ecx,1), %xmm1");
>> + asm volatile("sha256rnds2 (%eax,%ecx,8), %xmm1");
>> + asm volatile("sha256rnds2 0x12(%eax), %xmm1");
>> + asm volatile("sha256rnds2 0x12(%ebp), %xmm1");
>> + asm volatile("sha256rnds2 0x12(%ecx,%eax,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12(%ebp,%eax,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12(%eax,%ecx,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12(%eax,%ecx,8), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(%eax), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(%ebp), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(%ecx,%eax,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(%ebp,%eax,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(%eax,%ecx,1), %xmm1");
>> + asm volatile("sha256rnds2 0x12345678(%eax,%ecx,8), %xmm1");
>> +
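FWIW, on the implicit operand: at the C level the intrinsic hides the
%xmm0 constraint, the compiler has to arrange for the round-key argument
to land in that register. A minimal sketch (the function name is mine,
not from the patch; needs gcc/clang with SHA extension support, e.g.
-msha):

	#include <immintrin.h>

	/* Two rounds of SHA-256: sha256rnds2 encodes the round-key
	 * operand implicitly as %xmm0, so the compiler must place k
	 * there; the intrinsic takes it as a normal argument. */
	static __m128i sha256_two_rounds(__m128i state0, __m128i state1,
					 __m128i k)
	{
		return _mm_sha256rnds2_epu32(state0, state1, k);
	}

That is also why the tests above use %xmm1 rather than %xmm0 as the
explicit destination.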
>> + /* sha256msg1 xmm2/m128, xmm1 */
>> +
>> + asm volatile("sha256msg1 %xmm1, %xmm0");
>> + asm volatile("sha256msg1 %xmm7, %xmm2");
>> + asm volatile("sha256msg1 (%eax), %xmm0");
>> + asm volatile("sha256msg1 (0x12345678), %xmm0");
>> + asm volatile("sha256msg1 (%eax), %xmm3");
>> + asm volatile("sha256msg1 (%ecx,%eax,1), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(,%eax,1), %xmm0");
>> + asm volatile("sha256msg1 (%eax,%ecx,1), %xmm0");
>> + asm volatile("sha256msg1 (%eax,%ecx,8), %xmm0");
>> + asm volatile("sha256msg1 0x12(%eax), %xmm0");
>> + asm volatile("sha256msg1 0x12(%ebp), %xmm0");
>> + asm volatile("sha256msg1 0x12(%ecx,%eax,1), %xmm0");
>> + asm volatile("sha256msg1 0x12(%ebp,%eax,1), %xmm0");
>> + asm volatile("sha256msg1 0x12(%eax,%ecx,1), %xmm0");
>> + asm volatile("sha256msg1 0x12(%eax,%ecx,8), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(%eax), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(%ebp), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(%ecx,%eax,1), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(%ebp,%eax,1), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(%eax,%ecx,1), %xmm0");
>> + asm volatile("sha256msg1 0x12345678(%eax,%ecx,8), %xmm0");
>> +
>> + /* sha256msg2 xmm2/m128, xmm1 */
>> +
>> + asm volatile("sha256msg2 %xmm1, %xmm0");
>> + asm volatile("sha256msg2 %xmm7, %xmm2");
>> + asm volatile("sha256msg2 (%eax), %xmm0");
>> + asm volatile("sha256msg2 (0x12345678), %xmm0");
>> + asm volatile("sha256msg2 (%eax), %xmm3");
>> + asm volatile("sha256msg2 (%ecx,%eax,1), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(,%eax,1), %xmm0");
>> + asm volatile("sha256msg2 (%eax,%ecx,1), %xmm0");
>> + asm volatile("sha256msg2 (%eax,%ecx,8), %xmm0");
>> + asm volatile("sha256msg2 0x12(%eax), %xmm0");
>> + asm volatile("sha256msg2 0x12(%ebp), %xmm0");
>> + asm volatile("sha256msg2 0x12(%ecx,%eax,1), %xmm0");
>> + asm volatile("sha256msg2 0x12(%ebp,%eax,1), %xmm0");
>> + asm volatile("sha256msg2 0x12(%eax,%ecx,1), %xmm0");
>> + asm volatile("sha256msg2 0x12(%eax,%ecx,8), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(%eax), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(%ebp), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(%ecx,%eax,1), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(%ebp,%eax,1), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(%eax,%ecx,1), %xmm0");
>> + asm volatile("sha256msg2 0x12345678(%eax,%ecx,8), %xmm0");
>> +
>> #endif /* #ifndef __x86_64__ */
>>
>> /* Following line is a marker for the awk script - do not change */
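For anyone cross-checking the generated tables: each asm volatile
statement above becomes one record in insn-x86-dat-32.c/-64.c, roughly
of this shape (field names here are illustrative, not the actual
declaration in insn-x86.c):

	struct insn_test_record {
		unsigned char bytes[16];  /* encoded instruction */
		int len;                  /* expected decoded length */
		unsigned long rel;        /* branch offset, 0 if none */
		const char *op;           /* opcode class, e.g. "jcc" */
		const char *branch;       /* e.g. "conditional" */
		const char *disasm;       /* objdump line the bytes came from */
	};

The test then feeds the bytes to the instruction decoder and checks
that it arrives at the same length objdump did.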
>> diff --git a/tools/perf/util/intel-pt-decoder/x86-opcode-map.txt b/tools/perf/util/intel-pt-decoder/x86-opcode-map.txt
>> index a02a195d219c..25dad388b371 100644
>> --- a/tools/perf/util/intel-pt-decoder/x86-opcode-map.txt
>> +++ b/tools/perf/util/intel-pt-decoder/x86-opcode-map.txt
>> @@ -736,6 +736,12 @@ bd: vfnmadd231ss/d Vx,Hx,Wx (66),(v),(v1)
>> be: vfnmsub231ps/d Vx,Hx,Wx (66),(v)
>> bf: vfnmsub231ss/d Vx,Hx,Wx (66),(v),(v1)
>> # 0x0f 0x38 0xc0-0xff
>> +c8: sha1nexte Vdq,Wdq
>> +c9: sha1msg1 Vdq,Wdq
>> +ca: sha1msg2 Vdq,Wdq
>> +cb: sha256rnds2 Vdq,Wdq
>> +cc: sha256msg1 Vdq,Wdq
>> +cd: sha256msg2 Vdq,Wdq
>> db: VAESIMC Vdq,Wdq (66),(v1)
>> dc: VAESENC Vdq,Hdq,Wdq (66),(v1)
>> dd: VAESENCLAST Vdq,Hdq,Wdq (66),(v1)
>> @@ -794,6 +800,7 @@ AVXcode: 3
>> 61: vpcmpestri Vdq,Wdq,Ib (66),(v1)
>> 62: vpcmpistrm Vdq,Wdq,Ib (66),(v1)
>> 63: vpcmpistri Vdq,Wdq,Ib (66),(v1)
>> +cc: sha1rnds4 Vdq,Wdq,Ib
>> df: VAESKEYGEN Vdq,Wdq,Ib (66),(v1)
>> f0: RORX Gy,Ey,Ib (F2),(v)
>> EndTable
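The new entries are easy to sanity-check in isolation, too. A minimal
sketch, assuming the tools copy of the decoder (insn.c, inat.c and the
generated inat tables from util/intel-pt-decoder) is compiled in:

	#include <stdio.h>
	#include "insn.h"

	int main(void)
	{
		/* 0f 38 c8 c1 = sha1nexte %xmm1,%xmm0 (entry c8 above) */
		unsigned char buf[] = { 0x0f, 0x38, 0xc8, 0xc1 };
		struct insn insn;

		insn_init(&insn, buf, sizeof(buf), 1 /* x86_64 */);
		insn_get_length(&insn);

		/* Without the c8 map entry the decoder would not know a
		 * ModRM byte follows and would stop one byte short. */
		printf("length %d (expect 4)\n", insn.length);
		return 0;
	}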
>> --
>> 1.9.1