/* **********************************************************
* Copyright (c) 2014 Google, Inc. All rights reserved.
* **********************************************************/
/*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of Google, Inc. nor the names of its contributors may be
* used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL GOOGLE, INC. OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*/
#include "../globals.h" /* need this to include decode.h (uint, etc.) */
#include "arch.h" /* need this to include decode.h (byte, etc. */
#include "decode.h"
#include "decode_private.h"
#include "table_private.h"
/* The D bit (0x04) has been removed. We are including the U bit. Is
* there some other order we could use to group the related instrs better
* while still minimizing the number of table lookup steps and keeping
* dense tables?
*/
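/* A minimal illustrative sketch (not used by the decoder; the helper name
 * is hypothetical) of how a 7-bit index into A32_unpred_opc7 below could be
 * formed under the layout described above: bits 27:24 select one of the 16
 * rows of 8, and bit 23 (U) plus bits 21:20 select the column, with bit 22
 * (D) excluded.  E.g., 0xf6d0f000 gives row 6, column 5 => index 0x35,
 * matching the top7[0x35] chain entry below.
 */
static inline uint
a32_unpred_top7_index(uint enc)
{
    uint row = (enc >> 24) & 0xf;    /* bits 27:24 */
    uint col = ((enc >> 21) & 0x4) | /* bit 23 (U) as the high column bit */
               ((enc >> 20) & 0x3);  /* bits 21:20; bit 22 (D) is dropped */
    return (row << 3) | col;         /* 16 rows x 8 columns = 128 entries */
}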
const instr_info_t A32_unpred_opc7[] = {
/* {op/type, op encoding, name, dst1, dst2, src1, src2, src3, flags, eflags, code} */
/* 00 */
{INVALID, 0xf0000000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf0100000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf0200000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf0300000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf0800000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf0900000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf0a00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf0b00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
/* 10 */
{EXT_BITS16, 0xf1000000, "(ext bits16 8)", xx, xx, xx, xx, xx, no, x, 8},
{INVALID, 0xf1100000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf1200000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf1300000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf1800000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf1900000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf1a00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf1b00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
/* 20 */
{EXT_SIMD6, 0xf2000000, "(ext simd6 0)", xx, xx, xx, xx, xx, no, x, 0},
{EXT_SIMD6, 0xf2100000, "(ext simd6 1)", xx, xx, xx, xx, xx, no, x, 1},
{EXT_SIMD6, 0xf2200000, "(ext simd6 2)", xx, xx, xx, xx, xx, no, x, 2},
{EXT_SIMD6, 0xf2300000, "(ext simd6 3)", xx, xx, xx, xx, xx, no, x, 3},
{EXT_BIT19, 0xf2800000, "(ext bit19 0)", xx, xx, xx, xx, xx, no, x, 0},
{EXT_SIMD6, 0xf2900000, "(ext simd6 4)", xx, xx, xx, xx, xx, no, x, 4},
{EXT_SIMD6, 0xf2a00000, "(ext simd6 5)", xx, xx, xx, xx, xx, no, x, 5},
{EXT_BIT4, 0xf2b00000, "(ext bit4 13)", xx, xx, xx, xx, xx, no, x, 13},
/* 30 */
{EXT_SIMD6, 0xf3000000, "(ext simd6 6)", xx, xx, xx, xx, xx, no, x, 6},
{EXT_SIMD6, 0xf3100000, "(ext simd6 7)", xx, xx, xx, xx, xx, no, x, 7},
{EXT_SIMD6, 0xf3200000, "(ext simd6 8)", xx, xx, xx, xx, xx, no, x, 8},
{EXT_SIMD6, 0xf3300000, "(ext simd6 9)", xx, xx, xx, xx, xx, no, x, 9},
{EXT_BIT19, 0xf3800000, "(ext bit19 1)", xx, xx, xx, xx, xx, no, x, 1},
{EXT_SIMD6, 0xf3900000, "(ext simd6 10)", xx, xx, xx, xx, xx, no, x, 10},
{EXT_SIMD6, 0xf3a00000, "(ext simd6 11)", xx, xx, xx, xx, xx, no, x, 11},
{EXT_BITS16, 0xf3b00000, "(ext bits16 9)", xx, xx, xx, xx, xx, no, x, 9},
/* 40 */
{EXT_VLDA, 0xf4000000, "(ext vldA 0)", xx, xx, xx, xx, xx, no, x, 0},
{OP_pli, 0xf450f000, "pli", xx, xx, MN12z, xx, xx, no, x, top7[0x35]},/*PUW=000*/
{EXT_VLDA, 0xf4200000, "(ext vldA 1)", xx, xx, xx, xx, xx, no, x, 1},
{INVALID, 0xf4300000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{EXT_VLDB, 0xf4800000, "(ext vldB 0)", xx, xx, xx, xx, xx, no, x, 0},
{OP_pli, 0xf4d0f000, "pli", xx, xx, MP12z, xx, xx, no, x, top7[0x21]},/*PUW=010*/
{EXT_VLDB, 0xf4a00000, "(ext vldB 1)", xx, xx, xx, xx, xx, no, x, 1},
{INVALID, 0xf4b00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
/* 50 */
{INVALID, 0xf5000000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{EXT_BIT22, 0xf5100000, "(ext bit22 0)", xx, xx, xx, xx, xx, no, x, 0},
{INVALID, 0xf5200000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{EXT_FPB, 0xf5300000, "(ext fpb 14)", xx, xx, xx, xx, xx, no, x, 14},
{INVALID, 0xf5800000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{EXT_BIT22, 0xf5900000, "(ext bit22 1)", xx, xx, xx, xx, xx, no, x, 1},
{INVALID, 0xf5a00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf5b00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
/* 60 */
{INVALID, 0xf6000000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_pli, 0xf650f000, "pli", xx, xx, MNSz, xx, xx, no, x, END_LIST},/*PUW=000*/
{INVALID, 0xf6200000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf6300000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf6800000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_pli, 0xf6d0f000, "pli", xx, xx, MPSz, xx, xx, no, x, top7[0x31]},/*PUW=010*/
{INVALID, 0xf6a00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf6b00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
/* 70 */
{INVALID, 0xf7000000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{EXT_BIT22, 0xf7100000, "(ext bit22 2)", xx, xx, xx, xx, xx, no, x, 2},
{INVALID, 0xf7200000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf7300000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf7800000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{EXT_BIT22, 0xf7900000, "(ext bit22 3)", xx, xx, xx, xx, xx, no, x, 3},
{INVALID, 0xf7a00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf7b00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
/* 80 */
{OP_srsda, 0xf84d0500, "srsda", Mq, xx, xx, xx, xx, no, x, END_LIST},/*PUW=000*/
{OP_rfeda, 0xf8100a00, "rfeda", xx, xx, Mq, xx, xx, no, x, END_LIST},/*PUW=000*/
{OP_srsda, 0xf86d0500, "srsda", Mq, SPw, SPw, xx, xx, no, x, top7[0x40]},/*PUW=001*/
{OP_rfeda, 0xf8300a00, "rfeda", RAw, xx, Mq, RAw, xx, no, x, top7[0x41]},/*PUW=001*/
{OP_srsdb, 0xf8cd0500, "srsdb", Mq, xx, xx, xx, xx, no, x, END_LIST},/*PUW=010*/
{OP_rfedb, 0xf8900a00, "rfedb", xx, xx, Mq, xx, xx, no, x, END_LIST},/*PUW=010*/
{OP_srsdb, 0xf8ed0500, "srsdb", Mq, SPw, SPw, xx, xx, no, x, top7[0x44]},/*PUW=011*/
{OP_rfedb, 0xf8b00a00, "rfedb", RAw, xx, Mq, RAw, xx, no, x, top7[0x45]},/*PUW=011*/
/* 90 */
{OP_srsia, 0xf94d0500, "srsia", Mq, xx, xx, xx, xx, no, x, top7[0x4a]},/*PUW=100*/
{OP_rfeia, 0xf9100a00, "rfeia", xx, xx, Mq, xx, xx, no, x, top7[0x4b]},/*PUW=100*/
{OP_srsia, 0xf96d0500, "srsia", Mq, SPw, SPw, xx, xx, no, x, END_LIST},/*PUW=101*/
{OP_rfeia, 0xf9300a00, "rfeia", RAw, xx, Mq, RAw, xx, no, x, END_LIST},/*PUW=101*/
{OP_srsib, 0xf9cd0500, "srsib", Mq, xx, xx, xx, xx, no, x, top7[0x4e]},/*PUW=110*/
{OP_rfeib, 0xf9900a00, "rfeib", xx, xx, Mq, xx, xx, no, x, top7[0x4f]},/*PUW=110*/
{OP_srsib, 0xf9ed0500, "srsib", Mq, SPw, SPw, xx, xx, no, x, END_LIST},/*PUW=111*/
{OP_rfeib, 0xf9b00a00, "rfeib", RAw, xx, Mq, RAw, xx, no, x, END_LIST},/*PUW=111*/
/* a0 */
{OP_blx, 0xfa000000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfa100000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfa200000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfa300000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfa800000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfa900000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfaa00000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfab00000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
/* b0 */
{OP_blx, 0xfb000000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfb100000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfb200000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfb300000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfb800000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfb900000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfba00000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
{OP_blx, 0xfbb00000, "blx", xx, xx, i25x0_24, xx, xx, no, x, END_LIST},
/* c0 */
{EXT_BITS20, 0xfc000000, "(ext bits20 0)", xx, xx, xx, xx, xx, no, x, 0},
{EXT_BITS20, 0xfc100000, "(ext bits20 0)", xx, xx, xx, xx, xx, no, x, 0},
{EXT_BITS20, 0xfc200000, "(ext bits20 0)", xx, xx, xx, xx, xx, no, x, 0},
{EXT_BITS20, 0xfc300000, "(ext bits20 0)", xx, xx, xx, xx, xx, no, x, 0},
{EXT_BITS20, 0xfc800000, "(ext bits20 0)", xx, xx, xx, xx, xx, no, x, 0},
{EXT_BITS20, 0xfc900000, "(ext bits20 0)", xx, xx, xx, xx, xx, no, x, 0},
{EXT_BITS20, 0xfca00000, "(ext bits20 0)", xx, xx, xx, xx, xx, no, x, 0},
{EXT_BITS20, 0xfcb00000, "(ext bits20 0)", xx, xx, xx, xx, xx, no, x, 0},
/* d0 */
{EXT_BITS20, 0xfd000000, "(ext bits20 1)", xx, xx, xx, xx, xx, no, x, 1},
{EXT_BITS20, 0xfd100000, "(ext bits20 1)", xx, xx, xx, xx, xx, no, x, 1},
{EXT_BITS20, 0xfd200000, "(ext bits20 1)", xx, xx, xx, xx, xx, no, x, 1},
{EXT_BITS20, 0xfd300000, "(ext bits20 1)", xx, xx, xx, xx, xx, no, x, 1},
{EXT_BITS20, 0xfd800000, "(ext bits20 1)", xx, xx, xx, xx, xx, no, x, 1},
{EXT_BITS20, 0xfd900000, "(ext bits20 1)", xx, xx, xx, xx, xx, no, x, 1},
{EXT_BITS20, 0xfda00000, "(ext bits20 1)", xx, xx, xx, xx, xx, no, x, 1},
{EXT_BITS20, 0xfdb00000, "(ext bits20 1)", xx, xx, xx, xx, xx, no, x, 1},
/* e0 */
{EXT_FP, 0xfe000000, "(ext fp 46)", xx, xx, xx, xx, xx, no, x, 46},
{EXT_FP, 0xfe100000, "(ext fp 47)", xx, xx, xx, xx, xx, no, x, 47},
{EXT_FP, 0xfe200000, "(ext fp 48)", xx, xx, xx, xx, xx, no, x, 48},
{EXT_FP, 0xfe300000, "(ext fp 49)", xx, xx, xx, xx, xx, no, x, 49},
{EXT_FP, 0xfe800000, "(ext fp 50)", xx, xx, xx, xx, xx, no, x, 50},
{INVALID, 0xfe900000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xfea00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{EXT_SIMD5B, 0xfeb00000, "(ext simd5b 0)", xx, xx, xx, xx, xx, no, x, 0},
/* f0 */
{INVALID, 0xff000000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xff100000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xff200000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xff300000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xff800000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xff900000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xffa00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xffb00000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
};
/* Indexed by bits 23:20 */
const instr_info_t A32_ext_bits20[][16] = {
{ /* 0 */
{INVALID, 0xfc000000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},/*PUW=000*/
{INVALID, 0xfc100000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},/*PUW=000*/
{OP_stc2, 0xfc200000, "stc2", MN8w, RAw, i4_8, CRBw, n8, xop_wb, x, END_LIST},/*PUW=001*/
{OP_ldc2, 0xfc300000, "ldc2", CRBw, RAw, MN8w, i4_8, n8, xop_wb, x, END_LIST},/*PUW=001*/
{OP_mcrr2, 0xfc400000, "mcrr2", CRDw, RAw, RBw, i4_8, i4_7, srcX4, x, END_LIST},
{OP_mrrc2, 0xfc500000, "mrrc2", RBw, RAw, i4_8, i4_7, CRDw, no, x, END_LIST},
{OP_stc2l, 0xfc600000, "stc2l", MN8w, RAw, i4_8, CRBw, n8, xop_wb, x, END_LIST},/*PUW=001*/
{OP_ldc2l, 0xfc700000, "ldc2l", CRBw, RAw, MN8w, i4_8, n8, xop_wb, x, END_LIST},/*PUW=001*/
{OP_stc2, 0xfc800000, "stc2", MP8w, xx, i4_8, CRBw, i8, no, x, tb20[0][0x02]},/*PUW=010*/
{OP_ldc2, 0xfc900000, "ldc2", CRBw, xx, MP8w, i4_8, i8, no, x, tb20[0][0x03]},/*PUW=010*/
{OP_stc2, 0xfca00000, "stc2", MP8w, RAw, i4_8, CRBw, i8, xop_wb, x, tb20[0][0x08]},/*PUW=011*/
{OP_ldc2, 0xfcb00000, "ldc2", CRBw, RAw, MP8w, i4_8, i8, xop_wb, x, tb20[0][0x09]},/*PUW=011*/
{OP_stc2l, 0xfcc00000, "stc2l", MP8w, xx, i4_8, CRBw, i8, no, x, tb20[0][0x06]},/*PUW=010*/
{OP_ldc2l, 0xfcd00000, "ldc2l", CRBw, xx, MP8w, i4_8, i8, no, x, tb20[0][0x07]},/*PUW=010*/
{OP_stc2l, 0xfce00000, "stc2l", MP8w, RAw, i4_8, CRBw, i8, xop_wb, x, tb20[0][0x0c]},/*PUW=011*/
{OP_ldc2l, 0xfcf00000, "ldc2l", CRBw, RAw, MP8w, i4_8, i8, xop_wb, x, tb20[0][0x0d]},/*PUW=011*/
}, { /* 1 */
{OP_stc2, 0xfd000000, "stc2", MN8w, xx, i4_8, CRBw, n8, no, x, tb20[0][0x0a]},/*PUW=100*/
{OP_ldc2, 0xfd100000, "ldc2", CRBw, xx, MN8w, i4_8, i8, no, x, tb20[0][0x0b]},/*PUW=100*/
{OP_stc2, 0xfd200000, "stc2", MN8w, RAw, i4_8, CRBw, n8, xop_wb, x, END_LIST},/*PUW=101*/
{OP_ldc2, 0xfd300000, "ldc2", CRBw, RAw, MN8w, i4_8, n8, xop_wb, x, END_LIST},/*PUW=101*/
{OP_stc2l, 0xfd400000, "stc2l", MN8w, xx, i4_8, CRBw, n8, no, x, tb20[0][0x0e]},/*PUW=100*/
{OP_ldc2l, 0xfd500000, "ldc2l", CRBw, xx, MN8w, i4_8, i8, no, x, tb20[0][0x0f]},/*PUW=100*/
{OP_stc2l, 0xfd600000, "stc2l", MN8w, RAw, i4_8, CRBw, n8, xop_wb, x, END_LIST},/*PUW=101*/
{OP_ldc2l, 0xfd700000, "ldc2l", CRBw, RAw, MN8w, i4_8, n8, xop_wb, x, END_LIST},/*PUW=101*/
{OP_stc2, 0xfd800000, "stc2", MP8w, xx, i4_8, CRBw, i8, no, x, END_LIST},/*PUW=110*/
{OP_ldc2, 0xfd900000, "ldc2", CRBw, xx, MP8w, i4_8, i8, no, x, END_LIST},/*PUW=110*/
{OP_stc2, 0xfda00000, "stc2", MP8w, RAw, i4_8, CRBw, i8, xop_wb, x, END_LIST},/*PUW=111*/
{OP_ldc2, 0xfdb00000, "ldc2", CRBw, RAw, MP8w, i4_8, i8, xop_wb, x, END_LIST},/*PUW=111*/
{OP_stc2l, 0xfdc00000, "stc2l", MP8w, xx, i4_8, CRBw, i8, no, x, END_LIST},/*PUW=110*/
{OP_ldc2l, 0xfdd00000, "ldc2l", CRBw, xx, MP8w, i4_8, i8, no, x, END_LIST},/*PUW=110*/
{OP_stc2l, 0xfde00000, "stc2l", MP8w, RAw, i4_8, CRBw, i8, xop_wb, x, END_LIST},/*PUW=111*/
{OP_ldc2l, 0xfdf00000, "ldc2l", CRBw, RAw, MP8w, i4_8, i8, xop_wb, x, END_LIST},/*PUW=111*/
},
};
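/* A minimal illustrative sketch (hypothetical helper, not used by the
 * decoder) of the A32_ext_bits20 index above: just bits 23:20.
 * E.g., 0xfca00000 => index 0xa, matching the tb20[0][0x0a] chain entry.
 */
static inline uint
a32_ext_bits20_index(uint enc)
{
    return (enc >> 20) & 0xf; /* bits 23:20 */
}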
/* Indexed by whether imm5 in 20:16 is zero or not */
const instr_info_t A32_ext_imm2016[][2] = {
{ /* 0 */
{OP_vmovl_s32, 0xf2a00a10, "vmovl.s32", VBdq, xx, VCq, xx, xx, no, x, END_LIST},
{OP_vshll_s32, 0xf2a00a10, "vshll.s32", VBdq, xx, VCq, i5_16, xx, no, x, END_LIST},/*20:16 cannot be 0*/
}, { /* 1 */
{OP_vmovl_u32, 0xf3a00a10, "vmovl.u32", VBdq, xx, VCq, xx, xx, no, x, END_LIST},
{OP_vshll_u32, 0xf3a00a10, "vshll.u32", VBdq, xx, VCq, i5_16, xx, no, x, END_LIST},/*20:16 cannot be 0*/
},
};
/* Indexed by whether imm3 in 18:16 is zero or not */
const instr_info_t A32_ext_imm1816[][2] = {
{ /* 0 */
{OP_vmovl_s8, 0xf2880a10, "vmovl.s8", VBdq, xx, VCq, xx, xx, no, x, END_LIST},
{OP_vshll_s8, 0xf2880a10, "vshll.s8", VBdq, xx, VCq, i3_16, xx, no, x, END_LIST},/*18:16 cannot be 0*/
}, { /* 1 */
{OP_vmovl_u8, 0xf3880a10, "vmovl.u8", VBdq, xx, VCq, xx, xx, no, x, END_LIST},
{OP_vshll_u8, 0xf3880a10, "vshll.u8", VBdq, xx, VCq, i3_16, xx, no, x, END_LIST},/*18:16 cannot be 0*/
},
};
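/* A minimal illustrative sketch (hypothetical helpers, not used by the
 * decoder) of the zero-vs-nonzero selection for the two tables above:
 * vmovl is the shift-amount==0 special case, vshll everything else.
 */
static inline uint
a32_ext_imm2016_index(uint enc)
{
    return ((enc >> 16) & 0x1f) == 0 ? 0 : 1; /* bits 20:16 */
}

static inline uint
a32_ext_imm1816_index(uint enc)
{
    return ((enc >> 16) & 0x7) == 0 ? 0 : 1; /* bits 18:16 */
}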
/* Indexed by bit 6 */
const instr_info_t A32_ext_bit6[][2] = {
{ /* 0 */
{OP_vext, 0xf2b00000, "vext.8", VBq, xx, VAq, VCq, i4_8, no, x, tb6[0][0x01]},/*XXX: reads from part of srcs, but complex which part*/
{OP_vext, 0xf2b00040, "vext.8", VBdq, xx, VAdq, VCdq, i4_8, no, x, END_LIST},/*XXX: reads from part of srcs, but complex which part*/
}, { /* 1 */
{OP_vmaxnm_f32, 0xfe800a00, "vmaxnm.f32", WBd, xx, WAd, WCd, xx, v8|vfp, x, END_LIST},
{OP_vminnm_f32, 0xfe800a40, "vminnm.f32", WBd, xx, WAd, WCd, xx, v8|vfp, x, END_LIST},
}, { /* 2 */
{OP_vmaxnm_f64, 0xfe800b00, "vmaxnm.f64", VBq, xx, VAq, VCq, xx, v8|vfp, x, END_LIST},
{OP_vminnm_f64, 0xfe800b40, "vminnm.f64", VBq, xx, VAq, VCq, xx, v8|vfp, x, END_LIST},
},
};
/* Indexed by bit 7 */
const instr_info_t A32_ext_bit7[][2] = {
{ /* 0 */
{OP_vand, 0xf2000110, "vand", VBq, xx, VAq, VCq, xx, no, x, tb7[1][0x00]},
{OP_vsra_s64, 0xf2000190, "vsra.s64", VBq, xx, VCq, i6_16, xx, no, x, tb7[1][0x01]},/*XXX: imm = 64-imm*/
}, { /* 1 */
{OP_vand, 0xf2000150, "vand", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vsra_s64, 0xf20001d0, "vsra.s64", VBdq, xx, VCdq, i6_16, xx, no, x, END_LIST},/*XXX: imm = 64-imm*/
}, { /* 2 */
{OP_vqsub_s8, 0xf2000210, "vqsub.s8", VBq, xx, VAq, VCq, xx, no, x, tb7[3][0x00]},
{OP_vrshr_s64, 0xf2000290, "vrshr.s64", VBq, xx, VCq, i6_16, xx, no, x, tb7[3][0x01]},/*XXX: imm = 64-imm*/
}, { /* 3 */
{OP_vqsub_s8, 0xf2000250, "vqsub.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrshr_s64, 0xf20002d0, "vrshr.s64", VBdq, xx, VCdq, i6_16, xx, no, x, END_LIST},/*XXX: imm = 64-imm*/
}, { /* 4 */
{OP_veor, 0xf3000110, "veor", VBq, xx, VAq, VCq, xx, no, x, tb7[5][0x00]},
{OP_vsra_u64, 0xf3000190, "vsra.u64", VBq, xx, VCq, i6_16, xx, no, x, tb7[5][0x01]},/*XXX: imm = 64-imm*/
}, { /* 5 */
{OP_veor, 0xf3000150, "veor", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vsra_u64, 0xf30001d0, "vsra.u64", VBdq, xx, VCdq, i6_16, xx, no, x, END_LIST},/*XXX: imm = 64-imm*/
}, { /* 6 */
{OP_vqsub_u8, 0xf3000210, "vqsub.u8", VBq, xx, VAq, VCq, xx, no, x, tb7[7][0x00]},
{OP_vrshr_u64, 0xf3000290, "vrshr.u64", VBq, xx, VCq, i6_16, xx, no, x, tb7[7][0x01]},/*XXX: imm = 64-imm*/
}, { /* 7 */
{OP_vqsub_u8, 0xf3000250, "vqsub.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrshr_u64, 0xf30002d0, "vrshr.u64", VBdq, xx, VCdq, i6_16, xx, no, x, END_LIST},/*XXX: imm = 64-imm*/
},
};
/* Indexed by bit 19. This up-front split is simpler than having to split
* 37+ entries inside A32_ext_simd5[] into 2-entry members of this table.
*/
const instr_info_t A32_ext_bit19[][2] = {
{ /* 0 */
{EXT_SIMD8, 0xf2800000, "(ext simd8 0)", xx, xx, xx, xx, xx, no, x, 0},
{EXT_SIMD5, 0xf2880000, "(ext simd5 0)", xx, xx, xx, xx, xx, no, x, 0},
}, { /* 1 */
{EXT_SIMD8, 0xf3800000, "(ext simd8 1)", xx, xx, xx, xx, xx, no, x, 1},
{EXT_SIMD5, 0xf3880000, "(ext simd5 2)", xx, xx, xx, xx, xx, no, x, 2},
},
};
/* Indexed by bit 22 */
const instr_info_t A32_ext_bit22[][2] = {
{ /* 0 */
{OP_pldw, 0xf510f000, "pldw", xx, xx, MN12z, xx, xx, no, x, tb22[3][0x00]},/*PUW=100*/
{OP_pld, 0xf550f000, "pld", xx, xx, MN12z, xx, xx, no, x, tb22[3][0x01]},/*PUW=100*/
}, { /* 1 */
{OP_pldw, 0xf590f000, "pldw", xx, xx, MP12z, xx, xx, no, x, tb22[0][0x00]},/*PUW=110*/
{OP_pld, 0xf5d0f000, "pld", xx, xx, MP12z, xx, xx, no, x, tb22[0][0x01]},/*PUW=110*/
}, { /* 2 */
{OP_pldw, 0xf710f000, "pldw", xx, xx, MNSz, xx, xx, no, x, END_LIST},/*PUW=100*/
{OP_pld, 0xf750f000, "pld", xx, xx, MNSz, xx, xx, no, x, END_LIST},/*PUW=100*/
}, { /* 3 */
{OP_pldw, 0xf790f000, "pldw", xx, xx, MPSz, xx, xx, no, x, tb22[2][0x00]},/*PUW=110*/
{OP_pld, 0xf7d0f000, "pld", xx, xx, MPSz, xx, xx, no, x, tb22[2][0x01]},/*PUW=110*/
},
};
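/* A minimal illustrative sketch (hypothetical helper, not used by the
 * decoder) of the single-bit selection shared by A32_ext_bit6,
 * A32_ext_bit7, A32_ext_bit19, and A32_ext_bit22 above: one encoding bit
 * picks entry [0] or [1] of each 2-entry group.
 */
static inline uint
a32_ext_bitN_index(uint enc, uint bitpos) /* bitpos = 6, 7, 19, or 22 */
{
    return (enc >> bitpos) & 0x1;
}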
/* Indexed by 6 bits 11:8,6,4 (thus: 0-f | 0,1,4,5) */
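/* A minimal illustrative sketch (hypothetical helper, not used by the
 * decoder) of the 6-bit A32_ext_simd6 index just described: bits 11:8 form
 * the high four bits and bits 6 and 4 the low two, giving the 0x?00, 0x?10,
 * 0x?40, 0x?50 encoding pattern seen in each 4-entry group below.  E.g.,
 * 0xf2a00750 => (7 << 2) | 3 = 0x1f, matching the tsi6[5][0x1f] chain entry.
 */
static inline uint
a32_ext_simd6_index(uint enc)
{
    return (((enc >> 8) & 0xf) << 2) | /* bits 11:8 */
           ((enc >> 5) & 0x2) |        /* bit 6 */
           ((enc >> 4) & 0x1);         /* bit 4 */
}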
const instr_info_t A32_ext_simd6[][64] = {
{ /* 0 */
{OP_vhadd_s8, 0xf2000000, "vhadd.s8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x02]},
{OP_vqadd_s8, 0xf2000010, "vqadd.s8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x03]},
{OP_vhadd_s8, 0xf2000040, "vhadd.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqadd_s8, 0xf2000050, "vqadd.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrhadd_s8, 0xf2000100, "vrhadd.s8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x06]},
{EXT_BIT7, 0xf2000110, "(ext bit7 0)", xx, xx, xx, xx, xx, no, x, 0},
{OP_vrhadd_s8, 0xf2000140, "vrhadd.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{EXT_BIT7, 0xf2000150, "(ext bit7 1)", xx, xx, xx, xx, xx, no, x, 1},
{OP_vhsub_s8, 0xf2000200, "vhsub.s8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x0a]},
{EXT_BIT7, 0xf2000210, "(ext bit7 2)", xx, xx, xx, xx, xx, no, x, 2},
{OP_vhsub_s8, 0xf2000240, "vhsub.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{EXT_BIT7, 0xf2000250, "(ext bit7 3)", xx, xx, xx, xx, xx, no, x, 3},
{OP_vcgt_s8, 0xf2000300, "vcgt.s8", VBq, xx, VAq, VCq, xx, no, x, tsi6c[0][0x01]},
{OP_vcge_s8, 0xf2000310, "vcge.s8", VBq, xx, VAq, VCq, xx, no, x, tsi6c[0][0x03]},
{OP_vcgt_s8, 0xf2000340, "vcgt.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vcge_s8, 0xf2000350, "vcge.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vshl_s8, 0xf2000400, "vshl.s8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x12]},
{OP_vqshl_s8, 0xf2000410, "vqshl.s8", VBq, xx, VAq, VCq, xx, no, x, tsi5[0][0x0f]},
{OP_vshl_s8, 0xf2000440, "vshl.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqshl_s8, 0xf2000450, "vqshl.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrshl_s8, 0xf2000500, "vrshl.s8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x16]},
{OP_vqrshl_s8, 0xf2000510, "vqrshl.s8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x17]},
{OP_vrshl_s8, 0xf2000540, "vrshl.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqrshl_s8, 0xf2000550, "vqrshl.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmax_s8, 0xf2000600, "vmax.s8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x1a]},
{OP_vmin_s8, 0xf2000610, "vmin.s8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x1b]},
{OP_vmax_s8, 0xf2000640, "vmax.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmin_s8, 0xf2000650, "vmin.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vabd_s8, 0xf2000700, "vabd.s8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x1e]},
{OP_vaba_s8, 0xf2000710, "vaba.s8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x1f]},
{OP_vabd_s8, 0xf2000740, "vabd.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vaba_s8, 0xf2000750, "vaba.s8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
/* 0x80 */
{OP_vadd_i8, 0xf2000800, "vadd.i8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x22]},
{OP_vtst_8, 0xf2000810, "vtst.8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x23]},
{OP_vadd_i8, 0xf2000840, "vadd.i8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vtst_8, 0xf2000850, "vtst.8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmla_i8, 0xf2000900, "vmla.i8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x26]},
{OP_vmul_i8, 0xf2000910, "vmul.i8", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x27]},
{OP_vmla_i8, 0xf2000940, "vmla.i8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmul_i8, 0xf2000950, "vmul.i8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vpmax_s8, 0xf2000a00, "vpmax.s8", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vpmin_s8, 0xf2000a10, "vpmin.s8", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{INVALID, 0xf2000a40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2000a50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2000b00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vpadd_i8, 0xf2000b10, "vpadd.i8", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{INVALID, 0xf2000b40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2000b50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2000c00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vfma_f32, 0xf2000c10, "vfma.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x33]},
{OP_sha1c_32, 0xf2000c40, "sha1c.32", VBdq, xx, VAdq, VCdq, xx, v8, x, END_LIST},
{OP_vfma_f32, 0xf2000c50, "vfma.f32", VBdq, xx, VAdq, VCdq, xx, no, x, tfpA[10][0x00]},
{OP_vadd_f32, 0xf2000d00, "vadd.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x36]},
{OP_vmla_f32, 0xf2000d10, "vmla.f32", VBq, xx, VAq, VCq, xx, v8, x, tsi6[0][0x37]},
{OP_vadd_f32, 0xf2000d40, "vadd.f32", VBdq, xx, VAdq, VCdq, xx, no, x, tfpA[3][0x00]},
{OP_vmla_f32, 0xf2000d50, "vmla.f32", VBdq, xx, VAdq, VCdq, xx, v8, x, tfpA[0][0x00]},
{OP_vceq_f32, 0xf2000e00, "vceq.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6c[6][0x14]},
{INVALID, 0xf2000e10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vceq_f32, 0xf2000e40, "vceq.f32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf2000e50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmax_f32, 0xf2000f00, "vmax.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x3e]},
{OP_vrecps_f32, 0xf2000f10, "vrecps.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6[0][0x3f]},
{OP_vmax_f32, 0xf2000f40, "vmax.f32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrecps_f32, 0xf2000f50, "vrecps.f32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
}, { /* 1 */
{OP_vhadd_s16, 0xf2100000, "vhadd.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x02]},
{OP_vqadd_s16, 0xf2100010, "vqadd.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x03]},
{OP_vhadd_s16, 0xf2100040, "vhadd.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqadd_s16, 0xf2100050, "vqadd.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrhadd_s16, 0xf2100100, "vrhadd.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x06]},
{OP_vbic, 0xf2100110, "vbic", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x07]},
{OP_vrhadd_s16, 0xf2100140, "vrhadd.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vbic, 0xf2100150, "vbic", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vhsub_s16, 0xf2100200, "vhsub.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x0a]},
{OP_vqsub_s16, 0xf2100210, "vqsub.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x0b]},
{OP_vhsub_s16, 0xf2100240, "vhsub.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqsub_s16, 0xf2100250, "vqsub.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vcgt_s16, 0xf2100300, "vcgt.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6c[3][0x01]},
{OP_vcge_s16, 0xf2100310, "vcge.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6c[3][0x03]},
{OP_vcgt_s16, 0xf2100340, "vcgt.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vcge_s16, 0xf2100350, "vcge.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vshl_s16, 0xf2100400, "vshl.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x12]},
{OP_vqshl_s16, 0xf2100410, "vqshl.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[4][0x1f]},
{OP_vshl_s16, 0xf2100440, "vshl.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqshl_s16, 0xf2100450, "vqshl.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrshl_s16, 0xf2100500, "vrshl.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x16]},
{OP_vqrshl_s16, 0xf2100510, "vqrshl.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x17]},
{OP_vrshl_s16, 0xf2100540, "vrshl.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqrshl_s16, 0xf2100550, "vqrshl.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmax_s16, 0xf2100600, "vmax.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x1a]},
{OP_vmin_s16, 0xf2100610, "vmin.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x1b]},
{OP_vmax_s16, 0xf2100640, "vmax.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmin_s16, 0xf2100650, "vmin.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vabd_s16, 0xf2100700, "vabd.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x1e]},
{OP_vaba_s16, 0xf2100710, "vaba.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x1f]},
{OP_vabd_s16, 0xf2100740, "vabd.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vaba_s16, 0xf2100750, "vaba.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
/* 0x80 */
{OP_vadd_i16, 0xf2100800, "vadd.i16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x22]},
{OP_vtst_16, 0xf2100810, "vtst.16", VBq, xx, VAq, VCq, xx, no, x, tsi6[1][0x23]},
{OP_vadd_i16, 0xf2100840, "vadd.i16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vtst_16, 0xf2100850, "vtst.16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmla_i16, 0xf2100900, "vmla.i16", VBq, xx, VAq, VCq, xx, no, x, tsi6[10][0x02]},
{OP_vmul_i16, 0xf2100910, "vmul.i16", VBq, xx, VAq, VCq, xx, no, x, tsi6[10][0x22]},
{OP_vmla_i16, 0xf2100940, "vmla.i16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmul_i16, 0xf2100950, "vmul.i16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vpmax_s16, 0xf2100a00, "vpmax.s16", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vpmin_s16, 0xf2100a10, "vpmin.s16", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{INVALID, 0xf2100a40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100a50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqdmulh_s16, 0xf2100b00, "vqdmulh.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[10][0x2e]},
{OP_vpadd_i16, 0xf2100b10, "vpadd.i16", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vqdmulh_s16, 0xf2100b40, "vqdmulh.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf2100b50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100c00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100c10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_sha1p_32, 0xf2100c40, "sha1p.32", VBdq, xx, VAdq, VCdq, xx, v8, x, END_LIST},
{INVALID, 0xf2100c50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100d00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100d10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100d40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100d50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100e00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100e10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100e40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100e50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100f00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100f10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100f40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2100f50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
}, { /* 2 */
{OP_vhadd_s32, 0xf2200000, "vhadd.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x02]},
{OP_vqadd_s32, 0xf2200010, "vqadd.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x03]},
{OP_vhadd_s32, 0xf2200040, "vhadd.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqadd_s32, 0xf2200050, "vqadd.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrhadd_s32, 0xf2200100, "vrhadd.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x06]},
{OP_vorr, 0xf2200110, "vorr", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x07]},/*XXX: if src1==src2 then "vmov"*/
{OP_vrhadd_s32, 0xf2200140, "vrhadd.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vorr, 0xf2200150, "vorr", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},/*XXX: if src1==src2 then "vmov"*/
{OP_vhsub_s32, 0xf2200200, "vhsub.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x0a]},
{OP_vqsub_s32, 0xf2200210, "vqsub.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x0b]},
{OP_vhsub_s32, 0xf2200240, "vhsub.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqsub_s32, 0xf2200250, "vqsub.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vcgt_s32, 0xf2200300, "vcgt.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6c[6][0x01]},
{OP_vcge_s32, 0xf2200310, "vcge.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6c[6][0x03]},
{OP_vcgt_s32, 0xf2200340, "vcgt.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vcge_s32, 0xf2200350, "vcge.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vshl_s32, 0xf2200400, "vshl.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x12]},
{OP_vqshl_s32, 0xf2200410, "vqshl.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[5][0x1f]},
{OP_vshl_s32, 0xf2200440, "vshl.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqshl_s32, 0xf2200450, "vqshl.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrshl_s32, 0xf2200500, "vrshl.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x16]},
{OP_vqrshl_s32, 0xf2200510, "vqrshl.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x17]},
{OP_vrshl_s32, 0xf2200540, "vrshl.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqrshl_s32, 0xf2200550, "vqrshl.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmax_s32, 0xf2200600, "vmax.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x1a]},
{OP_vmin_s32, 0xf2200610, "vmin.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x1b]},
{OP_vmax_s32, 0xf2200640, "vmax.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmin_s32, 0xf2200650, "vmin.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vabd_s32, 0xf2200700, "vabd.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x1e]},
{OP_vaba_s32, 0xf2200710, "vaba.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x1f]},
{OP_vabd_s32, 0xf2200740, "vabd.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vaba_s32, 0xf2200750, "vaba.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vadd_i32, 0xf2200800, "vadd.i32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x22]},
{OP_vtst_32, 0xf2200810, "vtst.32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x23]},
{OP_vadd_i32, 0xf2200840, "vadd.i32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vtst_32, 0xf2200850, "vtst.32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmla_i32, 0xf2200900, "vmla.i32", VBq, xx, VAq, VCq, xx, no, x, tsi6[11][0x02]},
{OP_vmul_i32, 0xf2200910, "vmul.i32", VBq, xx, VAq, VCq, xx, no, x, tsi6[11][0x21]},
{OP_vmla_i32, 0xf2200940, "vmla.i32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmul_i32, 0xf2200950, "vmul.i32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vpmax_s32, 0xf2200a00, "vpmax.s32", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vpmin_s32, 0xf2200a10, "vpmin.s32", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{INVALID, 0xf2200a40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2200a50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqdmulh_s32, 0xf2200b00, "vqdmulh.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[11][0x31]},
{OP_vpadd_i32, 0xf2200b10, "vpadd.i32", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vqdmulh_s32, 0xf2200b40, "vqdmulh.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf2200b50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2200c00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vfms_f32, 0xf2200c10, "vfms.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x33]},
{OP_sha1m_32, 0xf2200c40, "sha1m.32", VBdq, xx, VAdq, VCdq, xx, v8, x, END_LIST},
{OP_vfms_f32, 0xf2200c50, "vfms.f32", VBdq, xx, VAdq, VCdq, xx, no, x, tfpA[10][0x02]},
{OP_vsub_f32, 0xf2200d00, "vsub.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x36]},
{OP_vmls_f32, 0xf2200d10, "vmls.f32", VBq, xx, VAq, VCq, xx, v8, x, tsi6[2][0x37]},
{OP_vsub_f32, 0xf2200d40, "vsub.f32", VBdq, xx, VAdq, VCdq, xx, no, x, tfpA[3][0x02]},
{OP_vmls_f32, 0xf2200d50, "vmls.f32", VBdq, xx, VAdq, VCdq, xx, v8, x, tfpA[0][0x02]},
{INVALID, 0xf2200e00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2200e10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2200e40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2200e50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmin_f32, 0xf2200f00, "vmin.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x3e]},
{OP_vrsqrts_f32, 0xf2200f10, "vrsqrts.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6[2][0x3f]},
{OP_vmin_f32, 0xf2200f40, "vmin.f32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrsqrts_f32, 0xf2200f50, "vrsqrts.f32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
}, { /* 3 */
/* XXX: this entry is sparse: should we make a new table to somehow compress it? */
{INVALID, 0xf2300000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqadd_s64, 0xf2300010, "vqadd.s64", VBq, xx, VAq, VCq, xx, no, x, tsi6[3][0x03]},
{INVALID, 0xf2300040, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqadd_s64, 0xf2300050, "vqadd.s64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf2300100, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vorn, 0xf2300110, "vorn", VBq, xx, VAq, VCq, xx, no, x, tsi6[3][0x07]},
{INVALID, 0xf2300140, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vorn, 0xf2300150, "vorn", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf2300200, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqsub_s64, 0xf2300210, "vqsub.s64", VBq, xx, VAq, VCq, xx, no, x, tsi6[3][0x0b]},
{INVALID, 0xf2300240, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqsub_s64, 0xf2300250, "vqsub.s64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf2300300, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300310, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300340, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300350, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vshl_s64, 0xf2300400, "vshl.s64", VBq, xx, VAq, VCq, xx, no, x, tsi6[3][0x12]},
{OP_vqshl_s64, 0xf2300410, "vqshl.s64", VBq, xx, VAq, VCq, xx, no, x, tsi8[0][0x46]},
{OP_vshl_s64, 0xf2300440, "vshl.s64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqshl_s64, 0xf2300450, "vqshl.s64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrshl_s64, 0xf2300500, "vrshl.s64", VBq, xx, VAq, VCq, xx, no, x, tsi6[3][0x16]},
{OP_vqrshl_s64, 0xf2300510, "vqrshl.s64", VBq, xx, VAq, VCq, xx, no, x, tsi6[3][0x17]},
{OP_vrshl_s64, 0xf2300540, "vrshl.s64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqrshl_s64, 0xf2300550, "vqrshl.s64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf2300600, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300610, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300640, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300650, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300700, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300710, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300740, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300750, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
/* 0x80 */
{OP_vadd_i64, 0xf2300800, "vadd.i64", VBq, xx, VAq, VCq, xx, no, x, tsi6[3][0x22]},
{INVALID, 0xf2300810, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vadd_i64, 0xf2300840, "vadd.i64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf2300850, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300900, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300910, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300940, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300950, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300a00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300a10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300a40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300a50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300b00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300b10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300b40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300b50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300c00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300c10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_sha1su0_32, 0xf2300c40, "sha1su0.32", VBdq, xx, VAdq, VCdq, xx, v8, x, END_LIST},
{INVALID, 0xf2300c50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300d00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300d10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300d40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300d50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300e00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300e10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300e40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300e50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300f00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300f10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300f40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2300f50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
}, { /* 4 */
{OP_vaddl_s16, 0xf2900000, "vaddl.s16", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vshr_s16, 0xf2900010, "vshr.s16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[4][0x03]},/*XXX: imm = 16-imm*/
{OP_vmla_i16, 0xf2900040, "vmla.i16", VBq, xx, VAq, VC3h_q, i2x5_3, no, x, tsi6[1][0x24]},
{OP_vshr_s16, 0xf2900050, "vshr.s16", VBdq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{OP_vaddw_s16, 0xf2900100, "vaddw.s16", VBdq, xx, VAdq, VCq, xx, no, x, END_LIST},
{OP_vsra_s16, 0xf2900110, "vsra.s16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[4][0x07]},/*XXX: imm = 16-imm*/
{INVALID, 0xf2900140, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vsra_s16, 0xf2900150, "vsra.s16", VBdq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{OP_vsubl_s16, 0xf2900200, "vsubl.s16", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vrshr_s16, 0xf2900210, "vrshr.s16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[4][0x0b]},/*XXX: imm = 16-imm*/
{OP_vmlal_s16, 0xf2900240, "vmlal.s16", VBdq, xx, VAq, VC3h_q, i2x5_3, no, x, END_LIST},
{OP_vrshr_s16, 0xf2900250, "vrshr.s16", VBdq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{OP_vsubw_s16, 0xf2900300, "vsubw.s16", VBdq, xx, VAdq, VCq, xx, no, x, END_LIST},
{OP_vrsra_s16, 0xf2900310, "vrsra.s16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[4][0x0f]},/*XXX: imm = 16-imm*/
{OP_vqdmlal_s16, 0xf2900340, "vqdmlal.s16", VBdq, xx, VAq, VC3h_q, i2x5_3, no, x, END_LIST},
{OP_vrsra_s16, 0xf2900350, "vrsra.s16", VBdq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{OP_vaddhn_i32, 0xf2900400, "vaddhn.i32", VBq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf2900410, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmls_i16, 0xf2900440, "vmls.i16", VBq, xx, VAq, VC3h_q, i2x5_3, no, x, tsi6[10][0x12]},
{INVALID, 0xf2900450, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vabal_s16, 0xf2900500, "vabal.s16", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vshl_i16, 0xf2900510, "vshl.i16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[4][0x17]},/*XXX: imm = 16-imm?*/
{INVALID, 0xf2900540, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vshl_i16, 0xf2900550, "vshl.i16", VBdq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm?*/
{OP_vsubhn_i32, 0xf2900600, "vsubhn.i32", VBq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf2900610, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmlsl_s16, 0xf2900640, "vmlsl.s16", VBdq, xx, VAq, VC3h_q, i2x5_3, no, x, END_LIST},
{INVALID, 0xf2900650, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vabdl_s16, 0xf2900700, "vabdl.s16", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vqshl_s16, 0xf2900710, "vqshl.s16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[1][0x11]},/*XXX: imm = imm-16*/
{OP_vqdmlsl_s16, 0xf2900740, "vqdmlsl.s16", VBdq, xx, VAq, VC3h_q, i2x5_3, no, x, END_LIST},
{OP_vqshl_s16, 0xf2900750, "vqshl.s16", VBdq, xx, VCdq, i4_16, xx, no, x, tsi6[1][0x13]},/*XXX: imm = imm-16*/
/* 0x80 */
{OP_vmlal_s16, 0xf2900800, "vmlal.s16", VBdq, xx, VAq, VCq, xx, no, x, tsi6[4][0x0a]},
{OP_vshrn_i16, 0xf2900810, "vshrn.i16", VBq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{OP_vmul_i16, 0xf2900840, "vmul.i16", VBq, xx, VAq, VC3h_q, i2x5_3, no, x, tsi6[1][0x25]},
{OP_vrshrn_i16, 0xf2900850, "vrshrn.i16", VBq, xx, VCq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{OP_vqdmlal_s16, 0xf2900900, "vqdmlal.s16", VBdq, xx, VAq, VCq, xx, no, x, tsi6[4][0x0e]},
{OP_vqshrn_s32, 0xf2900910, "vqshrn.s32", VBq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{INVALID, 0xf2900940, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqrshrn_s32, 0xf2900950, "vqrshrn.s32", VBq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{OP_vmlsl_s16, 0xf2900a00, "vmlsl.s16", VBdq, xx, VAq, VCq, xx, no, x, tsi6[4][0x1a]},
{EXT_IMM1916, 0xf2900a10, "(ext imm1916 1)", xx, xx, xx, xx, xx, no, x, 1},
{OP_vmull_s16, 0xf2900a40, "vmull.s16", VBdq, xx, VAq, VC3h_q, i2x5_3, no, x, END_LIST},
{INVALID, 0xf2900a50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqdmlsl_s16, 0xf2900b00, "vqdmlsl.s16", VBdq, xx, VAq, VCq, xx, no, x, tsi6[4][0x1e]},
{INVALID, 0xf2900b10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqdmull_s16, 0xf2900b40, "vqdmull.s16", VBdq, xx, VAq, VC3h_q, i2x5_3, no, x, END_LIST},
{INVALID, 0xf2900b50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmull_s16, 0xf2900c00, "vmull.s16", VBdq, xx, VAq, VCq, xx, no, x, tsi6[4][0x2a]},
{INVALID, 0xf2900c10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqdmulh_s16, 0xf2900c40, "vqdmulh.s16", VBq, xx, VAq, VC3h_q, i2x5_3, no, x, tsi6[1][0x2c]},
{INVALID, 0xf2900c50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqdmull_s16, 0xf2900d00, "vqdmull.s16", VBdq, xx, VAq, VCq, xx, no, x, tsi6[4][0x2e]},
{INVALID, 0xf2900d10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqrdmulh_s16, 0xf2900d40, "vqrdmulh.s16", VBq, xx, VAq, VC3h_q, i2x5_3, no, x, tsi6[10][0x32]},
{INVALID, 0xf2900d50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2900e00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2900e10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2900e40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2900e50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2900f00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2900f10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2900f40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2900f50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
}, { /* 5 */
{OP_vaddl_s32, 0xf2a00000, "vaddl.s32", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vshr_s32, 0xf2a00010, "vshr.s32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[5][0x03]},/*XXX: imm = 32-imm*/
{OP_vmla_i32, 0xf2a00040, "vmla.i32", VBq, xx, VAq, VC4d_q, i1_5, no, x, tsi6[2][0x24]},
{OP_vshr_s32, 0xf2a00050, "vshr.s32", VBdq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vaddw_s32, 0xf2a00100, "vaddw.s32", VBdq, xx, VAdq, VCq, xx, no, x, END_LIST},
{OP_vsra_s32, 0xf2a00110, "vsra.s32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[5][0x07]},/*XXX: imm = 32-imm*/
{OP_vmla_f32, 0xf2a00140, "vmla.f32", VBq, xx, VAq, VC4d_q, i1_5, no, x, tsi6[11][0x06]},
{OP_vsra_s32, 0xf2a00150, "vsra.s32", VBdq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vsubl_s32, 0xf2a00200, "vsubl.s32", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vrshr_s32, 0xf2a00210, "vrshr.s32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[5][0x0b]},/*XXX: imm = 32-imm*/
{OP_vmlal_s32, 0xf2a00240, "vmlal.s32", VBdq, xx, VAq, VC4d_q, i1_5, no, x, END_LIST},
{OP_vrshr_s32, 0xf2a00250, "vrshr.s32", VBdq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vsubw_s32, 0xf2a00300, "vsubw.s32", VBdq, xx, VAdq, VCq, xx, no, x, END_LIST},
{OP_vrsra_s32, 0xf2a00310, "vrsra.s32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[5][0x0f]},/*XXX: imm = 32-imm*/
{OP_vqdmlal_s32, 0xf2a00340, "vqdmlal.s32", VBdq, xx, VAq, VC4d_q, i1_5, no, x, END_LIST},
{OP_vrsra_s32, 0xf2a00350, "vrsra.s32", VBdq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vaddhn_i64, 0xf2a00400, "vaddhn.i64", VBq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf2a00410, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmls_i32, 0xf2a00440, "vmls.i32", VBq, xx, VAq, VC4d_q, i1_5, no, x, tsi6[11][0x11]},
{INVALID, 0xf2a00450, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vabal_s32, 0xf2a00500, "vabal.s32", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vshl_i32, 0xf2a00510, "vshl.i32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[5][0x17]},/*XXX: imm = 32-imm?*/
{OP_vmls_f32, 0xf2a00540, "vmls.f32", VBq, xx, VAq, VC4d_q, i1_5, no, x, tsi6[11][0x15]},
{OP_vshl_i32, 0xf2a00550, "vshl.i32", VBdq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm?*/
{OP_vsubhn_i64, 0xf2a00600, "vsubhn.i64", VBq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf2a00610, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmlsl_s32, 0xf2a00640, "vmlsl.s32", VBdq, xx, VAq, VC4d_q, i1_5, no, x, END_LIST},
{INVALID, 0xf2a00650, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vabdl_s32, 0xf2a00700, "vabdl.s32", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vqshl_s32, 0xf2a00710, "vqshl.s32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[2][0x11]},/*XXX: imm = imm-32*/
{OP_vqdmlsl_s32, 0xf2a00740, "vqdmlsl.s32", VBdq, xx, VAq, VC4d_q, i1_5, no, x, END_LIST},
{OP_vqshl_s32, 0xf2a00750, "vqshl.s32", VBdq, xx, VCdq, i5_16, xx, no, x, tsi6[2][0x13]},/*XXX: imm = imm-32*/
/* 0x80 */
{OP_vmlal_s32, 0xf2a00800, "vmlal.s32", VBdq, xx, VAq, VCq, xx, no, x, tsi6[5][0x0a]},
{OP_vshrn_i32, 0xf2a00810, "vshrn.i32", VBq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vmul_i32, 0xf2a00840, "vmul.i32", VBq, xx, VAq, VC4d_q, i1_5, no, x, tsi6[2][0x25]},
{OP_vrshrn_i32, 0xf2a00850, "vrshrn.i32", VBq, xx, VCq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vqdmlal_s32, 0xf2a00900, "vqdmlal.s32", VBdq, xx, VAq, VCq, xx, no, x, tsi6[5][0x0e]},
{OP_vqshrn_s64, 0xf2a00910, "vqshrn.s64", VBq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vmul_f32, 0xf2a00940, "vmul.f32", VBq, xx, VAq, VC4d_q, i1_5, no, x, tsi6[11][0x25]},
{OP_vqrshrn_s64, 0xf2a00950, "vqrshrn.s64", VBq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vmlsl_s32, 0xf2a00a00, "vmlsl.s32", VBdq, xx, VAq, VCq, xx, no, x, tsi6[5][0x1a]},
{EXT_IMM2016, 0xf2a00a10, "(ext imm2016 0)", xx, xx, xx, xx, xx, no, x, 0},
{OP_vmull_s32, 0xf2a00a40, "vmull.s32", VBdq, xx, VAq, VC4d_q, i1_5, no, x, END_LIST},
{INVALID, 0xf2a00a50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqdmlsl_s32, 0xf2a00b00, "vqdmlsl.s32", VBdq, xx, VAq, VCq, xx, no, x, tsi6[5][0x1e]},
{INVALID, 0xf2a00b10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqdmull_s32, 0xf2a00b40, "vqdmull.s32", VBdq, xx, VAq, VC4d_q, i1_5, no, x, END_LIST},
{INVALID, 0xf2a00b50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmull_s32, 0xf2a00c00, "vmull.s32", VBdq, xx, VAq, VCq, xx, no, x, tsi6[5][0x2a]},
{INVALID, 0xf2a00c10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqdmulh_s32, 0xf2a00c40, "vqdmulh.s32", VBq, xx, VAq, VC4d_q, i1_5, no, x, tsi6[2][0x2c]},
{INVALID, 0xf2a00c50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqdmull_s32, 0xf2a00d00, "vqdmull.s32", VBdq, xx, VAq, VCq, xx, no, x, tsi6[5][0x2e]},
{INVALID, 0xf2a00d10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqrdmulh_s32, 0xf2a00d40, "vqrdmulh.s32", VBq, xx, VAq, VC4d_q, i1_5, no, x, tsi6[11][0x35]},
{INVALID, 0xf2a00d50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmull_p32, 0xf2a00e00, "vmull.p32", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vcvt_f32_s32, 0xf2a00e10, "vcvt.f32.s32", VBq, xx, VCq, i6_16, xx, no, x, tsi2[0][0x01]},
{INVALID, 0xf2a00e40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vcvt_f32_s32, 0xf2a00e50, "vcvt.f32.s32", VBdq, xx, VCdq, i6_16, xx, no, x, t16[1][0x0a]},
{INVALID, 0xf2a00f00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vcvt_s32_f32, 0xf2a00f10, "vcvt.s32.f32", VBq, xx, VCq, i6_16, xx, no, x, tsi6c[8][0x1b]},
{INVALID, 0xf2a00f40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vcvt_s32_f32, 0xf2a00f50, "vcvt.s32.f32", VBdq, xx, VCdq, i6_16, xx, no, x, t16[1][0x0e]},
}, { /* 6 */
{OP_vhadd_u8, 0xf3000000, "vhadd.u8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x02]},
{OP_vqadd_u8, 0xf3000010, "vqadd.u8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x03]},
{OP_vhadd_u8, 0xf3000040, "vhadd.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqadd_u8, 0xf3000050, "vqadd.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrhadd_u8, 0xf3000100, "vrhadd.u8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x06]},
{EXT_BIT7, 0xf3000110, "(ext bit7 4)", xx, xx, xx, xx, xx, no, x, 4},
{OP_vrhadd_u8, 0xf3000140, "vrhadd.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{EXT_BIT7, 0xf3000150, "(ext bit7 5)", xx, xx, xx, xx, xx, no, x, 5},
{OP_vhsub_u8, 0xf3000200, "vhsub.u8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x0a]},
{EXT_BIT7, 0xf3000210, "(ext bit7 6)", xx, xx, xx, xx, xx, no, x, 6},
{OP_vhsub_u8, 0xf3000240, "vhsub.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{EXT_BIT7, 0xf3000250, "(ext bit7 7)", xx, xx, xx, xx, xx, no, x, 7},
{OP_vcgt_u8, 0xf3000300, "vcgt.u8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x0e]},
{OP_vcge_u8, 0xf3000310, "vcge.u8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x0f]},
{OP_vcgt_u8, 0xf3000340, "vcgt.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vcge_u8, 0xf3000350, "vcge.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vshl_u8, 0xf3000400, "vshl.u8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x12]},
{OP_vqshl_u8, 0xf3000410, "vqshl.u8", VBq, xx, VAq, VCq, xx, no, x, tsi5[2][0x0f]},
{OP_vshl_u8, 0xf3000440, "vshl.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqshl_u8, 0xf3000450, "vqshl.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrshl_u8, 0xf3000500, "vrshl.u8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x16]},
{OP_vqrshl_u8, 0xf3000510, "vqrshl.u8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x17]},
{OP_vrshl_u8, 0xf3000540, "vrshl.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqrshl_u8, 0xf3000550, "vqrshl.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmax_u8, 0xf3000600, "vmax.u8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x1a]},
{OP_vmin_u8, 0xf3000610, "vmin.u8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x1b]},
{OP_vmax_u8, 0xf3000640, "vmax.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmin_u8, 0xf3000650, "vmin.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqshlu_s64, 0xf30006d0, "vqshlu.s64", VBdq, xx, VCdq, i6_16, xx, no, x, END_LIST},
{OP_vabd_u8, 0xf3000700, "vabd.u8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x1f]},
{OP_vaba_u8, 0xf3000710, "vaba.u8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x20]},
{OP_vabd_u8, 0xf3000740, "vabd.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vaba_u8, 0xf3000750, "vaba.u8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
/* 0x80 */
{OP_vsub_i8, 0xf3000800, "vsub.i8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x23]},
{OP_vceq_i8, 0xf3000810, "vceq.i8", VBq, xx, VAq, VCq, xx, no, x, tsi6c[0][0x05]},
{OP_vsub_i8, 0xf3000840, "vsub.i8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vceq_i8, 0xf3000850, "vceq.i8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmls_i8, 0xf3000900, "vmls.i8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x27]},
{OP_vmul_p8, 0xf3000910, "vmul.p8", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x28]},
{OP_vmls_i8, 0xf3000940, "vmls.i8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmul_p8, 0xf3000950, "vmul.p8", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vpmax_u8, 0xf3000a00, "vpmax.u8", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vpmin_u8, 0xf3000a10, "vpmin.u8", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{INVALID, 0xf3000a40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3000a50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vpadd_f32, 0xf3000b00, "vpadd.f32", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vmul_f32, 0xf3000b10, "vmul.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6[5][0x26]},
{INVALID, 0xf3000b40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmul_f32, 0xf3000b50, "vmul.f32", VBdq, xx, VAdq, VCdq, xx, no, x, tfpA[2][0x00]},
{INVALID, 0xf3000c00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3000c10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_sha256h_32, 0xf3000c40, "sha256h.32", VBdq, xx, VAdq, VCdq, xx, v8, x, END_LIST},
{INVALID, 0xf3000c50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vcge_f32, 0xf3000e00, "vcge.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6c[6][0x12]},
{OP_vacge_f32, 0xf3000e10, "vacge.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6[6][0x38]},
{OP_vcge_f32, 0xf3000e40, "vcge.f32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vacge_f32, 0xf3000e50, "vacge.f32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vpmax_f32, 0xf3000f00, "vpmax.f32", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vmaxnm_f32, 0xf3000f10, "vmaxnm.f32", VBq, xx, VAq, VCq, xx, v8, x, tsi6[6][0x3c]},
{INVALID, 0xf3000f40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmaxnm_f32, 0xf3000f50, "vmaxnm.f32", VBdq, xx, VAdq, VCdq, xx, v8, x, tb6[1][0x00]},
}, { /* 7 */
{OP_vhadd_u16, 0xf3100000, "vhadd.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x02]},
{OP_vqadd_u16, 0xf3100010, "vqadd.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x03]},
{OP_vhadd_u16, 0xf3100040, "vhadd.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqadd_u16, 0xf3100050, "vqadd.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrhadd_u16, 0xf3100100, "vrhadd.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x06]},
{OP_vbsl, 0xf3100110, "vbsl", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x07]},
{OP_vrhadd_u16, 0xf3100140, "vrhadd.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vbsl, 0xf3100150, "vbsl", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vhsub_u16, 0xf3100200, "vhsub.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x0a]},
{OP_vqsub_u16, 0xf3100210, "vqsub.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x0b]},
{OP_vhsub_u16, 0xf3100240, "vhsub.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqsub_u16, 0xf3100250, "vqsub.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vcgt_u16, 0xf3100300, "vcgt.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x0e]},
{OP_vcge_u16, 0xf3100310, "vcge.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x0f]},
{OP_vcgt_u16, 0xf3100340, "vcgt.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vcge_u16, 0xf3100350, "vcge.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vshl_u16, 0xf3100400, "vshl.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x12]},
{OP_vqshl_u16, 0xf3100410, "vqshl.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[10][0x1f]},
{OP_vshl_u16, 0xf3100440, "vshl.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqshl_u16, 0xf3100450, "vqshl.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrshl_u16, 0xf3100500, "vrshl.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x16]},
{OP_vqrshl_u16, 0xf3100510, "vqrshl.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x17]},
{OP_vrshl_u16, 0xf3100540, "vrshl.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqrshl_u16, 0xf3100550, "vqrshl.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmax_u16, 0xf3100600, "vmax.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x1a]},
{OP_vmin_u16, 0xf3100610, "vmin.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x1b]},
{OP_vmax_u16, 0xf3100640, "vmax.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmin_u16, 0xf3100650, "vmin.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vabd_u16, 0xf3100700, "vabd.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x1e]},
{OP_vaba_u16, 0xf3100710, "vaba.u16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x1f]},
{OP_vabd_u16, 0xf3100740, "vabd.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vaba_u16, 0xf3100750, "vaba.u16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vsub_i16, 0xf3100800, "vsub.i16", VBq, xx, VAq, VCq, xx, no, x, tsi6[7][0x22]},
{OP_vceq_i16, 0xf3100810, "vceq.i16", VBq, xx, VAq, VCq, xx, no, x, tsi6c[3][0x05]},
{OP_vsub_i16, 0xf3100840, "vsub.i16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vceq_i16, 0xf3100850, "vceq.i16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmls_i16, 0xf3100900, "vmls.i16", VBq, xx, VAq, VCq, xx, no, x, tsi6[4][0x12]},
{INVALID, 0xf3100910, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmls_i16, 0xf3100940, "vmls.i16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf3100950, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vpmax_u16, 0xf3100a00, "vpmax.u16", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vpmin_u16, 0xf3100a10, "vpmin.u16", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{INVALID, 0xf3100a40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100a50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqrdmulh_s16, 0xf3100b00, "vqrdmulh.s16", VBq, xx, VAq, VCq, xx, no, x, tsi6[4][0x36]},
{INVALID, 0xf3100b10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqrdmulh_s16, 0xf3100b40, "vqrdmulh.s16", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf3100b50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100c00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100c10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_sha256h2_32, 0xf3100c40, "sha256h2.32", VBdq, xx, VAdq, VCdq, xx, v8, x, END_LIST},
{INVALID, 0xf3100c50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100d00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100d10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100d40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100d50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100e00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100e10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100e40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100e50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100f00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100f10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100f40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3100f50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
}, { /* 8 */
{OP_vhadd_u32, 0xf3200000, "vhadd.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x02]},
{OP_vqadd_u32, 0xf3200010, "vqadd.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x03]},
{OP_vhadd_u32, 0xf3200040, "vhadd.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqadd_u32, 0xf3200050, "vqadd.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrhadd_u32, 0xf3200100, "vrhadd.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x06]},
{OP_vbit, 0xf3200110, "vbit", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x07]},
{OP_vrhadd_u32, 0xf3200140, "vrhadd.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vbit, 0xf3200150, "vbit", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vhsub_u32, 0xf3200200, "vhsub.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x0a]},
{OP_vqsub_u32, 0xf3200210, "vqsub.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x0b]},
{OP_vhsub_u32, 0xf3200240, "vhsub.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqsub_u32, 0xf3200250, "vqsub.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vcgt_u32, 0xf3200300, "vcgt.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x0e]},
{OP_vcge_u32, 0xf3200310, "vcge.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x0f]},
{OP_vcgt_u32, 0xf3200340, "vcgt.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vcge_u32, 0xf3200350, "vcge.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vshl_u32, 0xf3200400, "vshl.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x12]},
{OP_vqshl_u32, 0xf3200410, "vqshl.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[11][0x1f]},
{OP_vshl_u32, 0xf3200440, "vshl.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqshl_u32, 0xf3200450, "vqshl.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrshl_u32, 0xf3200500, "vrshl.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x16]},
{OP_vqrshl_u32, 0xf3200510, "vqrshl.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x17]},
{OP_vrshl_u32, 0xf3200540, "vrshl.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqrshl_u32, 0xf3200550, "vqrshl.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmax_u32, 0xf3200600, "vmax.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x1a]},
{OP_vmin_u32, 0xf3200610, "vmin.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x1b]},
{OP_vmax_u32, 0xf3200640, "vmax.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmin_u32, 0xf3200650, "vmin.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vabd_u32, 0xf3200700, "vabd.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x1e]},
{OP_vaba_u32, 0xf3200710, "vaba.u32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x1f]},
{OP_vabd_u32, 0xf3200740, "vabd.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vaba_u32, 0xf3200750, "vaba.u32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
/* 0x80 */
{OP_vsub_i32, 0xf3200800, "vsub.i32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x22]},
{OP_vceq_i32, 0xf3200810, "vceq.i32", VBq, xx, VAq, VCq, xx, no, x, tsi6c[6][0x05]},
{OP_vsub_i32, 0xf3200840, "vsub.i32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vceq_i32, 0xf3200850, "vceq.i32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmls_i32, 0xf3200900, "vmls.i32", VBq, xx, VAq, VCq, xx, no, x, tsi6[5][0x12]},
{OP_vmul_p32, 0xf3200910, "vmul.p32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x27]},
{OP_vmls_i32, 0xf3200940, "vmls.i32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vmul_p32, 0xf3200950, "vmul.p32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vpmax_u32, 0xf3200a00, "vpmax.u32", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vpmin_u32, 0xf3200a10, "vpmin.u32", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{INVALID, 0xf3200a40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3200a50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqrdmulh_s32, 0xf3200b00, "vqrdmulh.s32", VBq, xx, VAq, VCq, xx, no, x, tsi6[5][0x36]},
{INVALID, 0xf3200b10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqrdmulh_s32, 0xf3200b40, "vqrdmulh.s32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf3200b50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3200c00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3200c10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_sha256su1_32, 0xf3200c40, "sha256su1.32", VBdq, xx, VAdq, VCdq, xx, v8, x, END_LIST},
{INVALID, 0xf3200c50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vabd_f32, 0xf3200d00, "vabd.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x36]},
{INVALID, 0xf3200d10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vabd_f32, 0xf3200d40, "vabd.f32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf3200d50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vcgt_f32, 0xf3200e00, "vcgt.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6c[6][0x10]},
{OP_vacgt_f32, 0xf3200e10, "vacgt.f32", VBq, xx, VAq, VCq, xx, no, x, tsi6[8][0x3b]},
{OP_vcgt_f32, 0xf3200e40, "vcgt.f32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vacgt_f32, 0xf3200e50, "vacgt.f32", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vpmin_f32, 0xf3200f00, "vpmin.f32", VBq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vminnm_f32, 0xf3200f10, "vminnm.f32", VBq, xx, VAq, VCq, xx, v8, x, tsi6[8][0x3f]},
{INVALID, 0xf3200f40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vminnm_f32, 0xf3200f50, "vminnm.f32", VBdq, xx, VAdq, VCdq, xx, v8, x, tb6[1][0x01]},
}, { /* 9 */
/* XXX: this entry is sparse: should we make a new table to somehow compress it? */
{INVALID, 0xf3300000, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqadd_u64, 0xf3300010, "vqadd.u64", VBq, xx, VAq, VCq, xx, no, x, tsi6[9][0x03]},
{INVALID, 0xf3300040, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqadd_u64, 0xf3300050, "vqadd.u64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf3300100, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vbif, 0xf3300110, "vbif", VBq, xx, VAq, VCq, xx, no, x, tsi6[9][0x07]},
{INVALID, 0xf3300140, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vbif, 0xf3300150, "vbif", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf3300200, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqsub_u64, 0xf3300210, "vqsub.u64", VBq, xx, VAq, VCq, xx, no, x, tsi6[9][0x0b]},
{INVALID, 0xf3300240, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqsub_u64, 0xf3300250, "vqsub.u64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf3300300, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300310, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300340, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300350, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vshl_u64, 0xf3300400, "vshl.u64", VBq, xx, VAq, VCq, xx, no, x, tsi6[9][0x12]},
{OP_vqshl_u64, 0xf3300410, "vqshl.u64", VBq, xx, VAq, VCq, xx, no, x, tsi8[1][0x46]},
{OP_vshl_u64, 0xf3300440, "vshl.u64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqshl_u64, 0xf3300450, "vqshl.u64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vrshl_u64, 0xf3300500, "vrshl.u64", VBq, xx, VAq, VCq, xx, no, x, tsi6[9][0x16]},
{OP_vqrshl_u64, 0xf3300510, "vqrshl.u64", VBq, xx, VAq, VCq, xx, no, x, tsi6[9][0x17]},
{OP_vrshl_u64, 0xf3300540, "vrshl.u64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqrshl_u64, 0xf3300550, "vqrshl.u64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf3300600, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300610, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300640, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300650, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300700, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300710, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300740, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300750, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
/* 0x80 */
{OP_vsub_i64, 0xf3300800, "vsub.i64", VBq, xx, VAq, VCq, xx, no, x, tsi6[9][0x22]},
{INVALID, 0xf3300810, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vsub_i64, 0xf3300840, "vsub.i64", VBdq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{INVALID, 0xf3300850, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300900, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300910, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300940, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300950, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300a00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300a10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300a40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300a50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300b00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300b10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300b40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300b50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300c00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300c10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300c40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300c50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300d00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300d10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300d40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300d50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300e00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300e10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300e40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300e50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300f00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300f10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300f40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3300f50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
}, { /* 10 */
{OP_vaddl_u16, 0xf3900000, "vaddl.u16", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vshr_u16, 0xf3900010, "vshr.u16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[10][0x03]},/*XXX: imm = 16-imm*/
{OP_vmla_i16, 0xf3900040, "vmla.i16", VBdq, xx, VAdq, VC3h_q, i2x5_3, no, x, tsi6[1][0x26]},
{OP_vshr_u16, 0xf3900050, "vshr.u16", VBdq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{OP_vaddw_u16, 0xf3900100, "vaddw.u16", VBdq, xx, VAdq, VCq, xx, no, x, END_LIST},
{OP_vsra_u16, 0xf3900110, "vsra.u16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[10][0x07]},/*XXX: imm = 16-imm*/
{INVALID, 0xf3900140, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vsra_u16, 0xf3900150, "vsra.u16", VBdq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{OP_vsubl_u16, 0xf3900200, "vsubl.u16", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vrshr_u16, 0xf3900210, "vrshr.u16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[10][0x0b]},/*XXX: imm = 16-imm*/
{OP_vmlal_u16, 0xf3900240, "vmlal.u16", VBdq, xx, VAq, VC3h_q, i2x5_3, no, x, END_LIST},
{OP_vrshr_u16, 0xf3900250, "vrshr.u16", VBdq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{OP_vsubw_u16, 0xf3900300, "vsubw.u16", VBdq, xx, VAdq, VCq, xx, no, x, END_LIST},
{OP_vrsra_u16, 0xf3900310, "vrsra.u16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[10][0x0f]},/*XXX: imm = 16-imm*/
{INVALID, 0xf3900340, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vrsra_u16, 0xf3900350, "vrsra.u16", VBdq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{OP_vraddhn_i32, 0xf3900400, "vraddhn.i32", VBq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vsri_16, 0xf3900410, "vsri.16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[10][0x13]},/*XXX: imm = 16-imm?*/
{OP_vmls_i16, 0xf3900440, "vmls.i16", VBdq, xx, VAdq, VC3h_q, i2x5_3, no, x, tsi6[7][0x26]},
{OP_vsri_16, 0xf3900450, "vsri.16", VBdq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm?*/
{OP_vabal_u16, 0xf3900500, "vabal.u16", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vsli_16, 0xf3900510, "vsli.16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[10][0x17]},/*XXX: imm = 16-imm?*/
{INVALID, 0xf3900540, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vsli_16, 0xf3900550, "vsli.16", VBdq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm?*/
{OP_vrsubhn_i32, 0xf3900600, "vrsubhn.i32", VBq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqshlu_s16, 0xf3900610, "vqshlu.s16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[10][0x1b]},/*XXX: imm = imm-16*/
{OP_vmlsl_u16, 0xf3900640, "vmlsl.u16", VBdq, xx, VAq, VC3h_q, i2x5_3, no, x, END_LIST},
{OP_vqshlu_s16, 0xf3900650, "vqshlu.s16", VBdq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = imm-16*/
{OP_vabdl_u16, 0xf3900700, "vabdl.u16", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vqshl_u16, 0xf3900710, "vqshl.u16", VBq, xx, VCq, i4_16, xx, no, x, tsi6[7][0x11]},/*XXX: imm = imm-16*/
{INVALID, 0xf3900740, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqshl_u16, 0xf3900750, "vqshl.u16", VBdq, xx, VCdq, i4_16, xx, no, x, tsi6[7][0x13]},/*XXX: imm = imm-16*/
/* 0x80 */
{OP_vmlal_u16, 0xf3900800, "vmlal.u16", VBdq, xx, VAq, VCq, xx, no, x, tsi6[10][0x0a]},
{OP_vqshrn_u32, 0xf3900810, "vqshrn.u32", VBq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{OP_vmul_i16, 0xf3900840, "vmul.i16", VBdq, xx, VAdq, VC3h_q, i2x5_3, no, x, tsi6[1][0x27]},
{OP_vqrshrn_u32, 0xf3900850, "vqrshrn.u32", VBq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{INVALID, 0xf3900900, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqshrun_s32, 0xf3900910, "vqshrun.s32", VBq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{INVALID, 0xf3900940, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqrshrun_s32, 0xf3900950, "vqrshrun.s32", VBq, xx, VCdq, i4_16, xx, no, x, END_LIST},/*XXX: imm = 16-imm*/
{OP_vmlsl_u16, 0xf3900a00, "vmlsl.u16", VBdq, xx, VAq, VCq, xx, no, x, tsi6[10][0x1a]},
{EXT_IMM1916, 0xf3900a10, "(ext imm1916 2)", xx, xx, xx, xx, xx, no, x, 2},
{OP_vmull_u16, 0xf3900a40, "vmull.u16", VBdq, xx, VAq, VC3h_q, i2x5_3, no, x, END_LIST},
{INVALID, 0xf3900a50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900b00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900b10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900b40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900b50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmull_u16, 0xf3900c00, "vmull.u16", VBdq, xx, VAq, VCq, xx, no, x, tsi6[10][0x2a]},
{INVALID, 0xf3900c10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqdmulh_s16, 0xf3900c40, "vqdmulh.s16", VBdq, xx, VAdq, VC3h_q, i2x5_3, no, x, tsi6[1][0x2e]},
{INVALID, 0xf3900c50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900d00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900d10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqrdmulh_s16, 0xf3900d40, "vqrdmulh.s16", VBdq, xx, VAdq, VC3h_q, i2x5_3, no, x, tsi6[7][0x2e]},
{INVALID, 0xf3900d50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900e00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900e10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900e40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900e50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900f00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900f10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900f40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3900f50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
}, { /* 11 */
{OP_vaddl_u32, 0xf3a00000, "vaddl.u32", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vshr_u32, 0xf3a00010, "vshr.u32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[11][0x03]},/*XXX: imm = 32-imm*/
{OP_vmla_i32, 0xf3a00040, "vmla.i32", VBdq, xx, VAdq, VC4d_q, i1_5, no, x, tsi6[2][0x26]},
{OP_vshr_u32, 0xf3a00050, "vshr.u32", VBdq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vaddw_u32, 0xf3a00100, "vaddw.u32", VBdq, xx, VAdq, VCq, xx, no, x, END_LIST},
{OP_vsra_u32, 0xf3a00110, "vsra.u32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[11][0x07]},/*XXX: imm = 32-imm*/
{OP_vmla_f32, 0xf3a00140, "vmla.f32", VBdq, xx, VAdq, VC4d_q, i1_5, no, x, tsi6[0][0x35]},
{OP_vsra_u32, 0xf3a00150, "vsra.u32", VBdq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vsubl_u32, 0xf3a00200, "vsubl.u32", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vrshr_u32, 0xf3a00210, "vrshr.u32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[11][0x0b]},/*XXX: imm = 32-imm*/
{OP_vmlal_u32, 0xf3a00240, "vmlal.u32", VBdq, xx, VAq, VC4d_q, i1_5, no, x, END_LIST},
{OP_vrshr_u32, 0xf3a00250, "vrshr.u32", VBdq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vsubw_u32, 0xf3a00300, "vsubw.u32", VBdq, xx, VAdq, VCq, xx, no, x, END_LIST},
{OP_vrsra_u32, 0xf3a00310, "vrsra.u32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[11][0x0f]},/*XXX: imm = 32-imm*/
{INVALID, 0xf3a00340, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vrsra_u32, 0xf3a00350, "vrsra.u32", VBdq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vraddhn_i64, 0xf3a00400, "vraddhn.i64", VBq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vsri_32, 0xf3a00410, "vsri.32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[11][0x13]},/*XXX: imm = 32-imm?*/
{OP_vmls_i32, 0xf3a00440, "vmls.i32", VBdq, xx, VAdq, VC4d_q, i1_5, no, x, tsi6[8][0x26]},
{OP_vsri_32, 0xf3a00450, "vsri.32", VBdq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm?*/
{OP_vabal_u32, 0xf3a00500, "vabal.u32", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vsli_32, 0xf3a00510, "vsli.32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[11][0x17]},/*XXX: imm = 32-imm?*/
{OP_vmls_f32, 0xf3a00540, "vmls.f32", VBdq, xx, VAdq, VC4d_q, i1_5, no, x, tsi6[2][0x35]},
{OP_vsli_32, 0xf3a00550, "vsli.32", VBdq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm?*/
{OP_vrsubhn_i64, 0xf3a00600, "vrsubhn.i64", VBq, xx, VAdq, VCdq, xx, no, x, END_LIST},
{OP_vqshlu_s32, 0xf3a00610, "vqshlu.s32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[11][0x1b]},/*XXX: imm = imm-32*/
{OP_vmlsl_u32, 0xf3a00640, "vmlsl.u32", VBdq, xx, VAq, VC4d_q, i1_5, no, x, END_LIST},
{OP_vqshlu_s32, 0xf3a00650, "vqshlu.s32", VBdq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = imm-32*/
{OP_vabdl_u32, 0xf3a00700, "vabdl.u32", VBdq, xx, VAq, VCq, xx, no, x, END_LIST},
{OP_vqshl_u32, 0xf3a00710, "vqshl.u32", VBq, xx, VCq, i5_16, xx, no, x, tsi6[8][0x11]},/*XXX: imm = imm-32*/
{INVALID, 0xf3a00740, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqshl_u32, 0xf3a00750, "vqshl.u32", VBdq, xx, VCdq, i5_16, xx, no, x, tsi6[8][0x13]},/*XXX: imm = imm-32*/
/* 0x80 */
{OP_vmlal_u32, 0xf3a00800, "vmlal.u32", VBdq, xx, VAq, VCq, xx, no, x, tsi6[11][0x0a]},
{OP_vqshrn_u64, 0xf3a00810, "vqshrn.u64", VBq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vmul_i32, 0xf3a00840, "vmul.i32", VBdq, xx, VAdq, VC4d_q, i1_5, no, x, tsi6[2][0x27]},
{OP_vqrshrn_u64, 0xf3a00850, "vqrshrn.u64", VBq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{INVALID, 0xf3a00900, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqshrun_s64, 0xf3a00910, "vqshrun.s64", VBq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vmul_f32, 0xf3a00940, "vmul.f32", VBdq, xx, VAdq, VC4d_q, i1_5, no, x, tsi6[6][0x37]},
{OP_vqrshrun_s64, 0xf3a00950, "vqrshrun.s64", VBq, xx, VCdq, i5_16, xx, no, x, END_LIST},/*XXX: imm = 32-imm*/
{OP_vmlsl_u32, 0xf3a00a00, "vmlsl.u32", VBdq, xx, VAq, VCq, xx, no, x, tsi6[11][0x1a]},
{EXT_IMM2016, 0xf3a00a10, "(ext imm2016 1)", xx, xx, xx, xx, xx, no, x, 1},
{OP_vmull_u32, 0xf3a00a40, "vmull.u32", VBdq, xx, VAq, VC4d_q, i1_5, no, x, END_LIST},
{INVALID, 0xf3a00a50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3a00b00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3a00b10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3a00b40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3a00b50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vmull_u32, 0xf3a00c00, "vmull.u32", VBdq, xx, VAq, VCq, xx, no, x, tsi6[11][0x2a]},
{INVALID, 0xf3a00c10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqdmulh_s32, 0xf3a00c40, "vqdmulh.s32", VBdq, xx, VAdq, VC4d_q, i1_5, no, x, tsi6[2][0x2e]},
{INVALID, 0xf3a00c50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3a00d00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3a00d10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqrdmulh_s32, 0xf3a00d40, "vqrdmulh.s32", VBdq, xx, VAdq, VC4d_q, i1_5, no, x, tsi6[8][0x2e]},
{INVALID, 0xf3a00d50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf3a00e00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vcvt_f32_u32, 0xf3a00e10, "vcvt.f32.u32", VBq, xx, VCq, i6_16, xx, no, x, tsi2[1][0x01]},
{INVALID, 0xf3a00e40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vcvt_f32_u32, 0xf3a00e50, "vcvt.f32.u32", VBdq, xx, VCdq, i6_16, xx, no, x, t16[1][0x0b]},
{INVALID, 0xf3a00f00, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vcvt_u32_f32, 0xf3a00f10, "vcvt.u32.f32", VBq, xx, VCq, i6_16, xx, no, x, tsi2[5][0x01]},
{INVALID, 0xf3a00f40, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vcvt_u32_f32, 0xf3a00f50, "vcvt.u32.f32", VBdq, xx, VCdq, i6_16, xx, no, x, t16[1][0x0f]},
},
};
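/* Illustrative sketch, not part of the decoder proper: how an encoding selects
 * an entry in the dense subtables above.  Each A32_ext_simd6 subtable holds 64
 * entries indexed by bits 11:8,6,4 of the instruction word, which is also the
 * index space the tsi6[table][entry] chain fields use; each A32_ext_simd5
 * subtable below holds 32 entries indexed by bits 11:8,6.  The helper names
 * here are hypothetical, for exposition only.
 */
static inline uint
example_simd6_idx(uint enc)
{
    /* Bits 11:8 form the top four index bits; bit 6 and bit 4 the bottom two.
     * E.g., 0xf3100040 yields 0x02, the quadword vhadd.u16 entry above.
     */
    return (((enc >> 8) & 0xf) << 2) | (((enc >> 6) & 0x1) << 1) | ((enc >> 4) & 0x1);
}
static inline uint
example_simd5_idx(uint enc)
{
    /* Bits 11:8 plus bit 6, e.g. 0xf2880050 yields 0x01 (vshr.s8, VBdq). */
    return (((enc >> 8) & 0xf) << 1) | ((enc >> 6) & 0x1);
}
/* Sketch of the adjustment flagged by the "XXX: imm = 16-imm" notes: for the
 * right-shift-by-immediate entries, the stored field (i3_16/i4_16/i5_16) is
 * the element size minus the architectural shift amount, so a vshr.u16 whose
 * 4-bit field holds 13 shifts right by 3.  Assumes esize is 8, 16, or 32.
 */
static inline uint
example_vshr_shift(uint enc, uint esize)
{
    return esize - ((enc >> 16) & (esize - 1));
}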
/* Indexed by bits 11:8,6 */
const instr_info_t A32_ext_simd5[][32] = {
{ /* 0 */
{OP_vshr_s8, 0xf2880010, "vshr.s8", VBq, xx, VCq, i3_16, xx, no, x, tsi5[0][0x01]},/*XXX: imm = 8-imm*/
{OP_vshr_s8, 0xf2880050, "vshr.s8", VBdq, xx, VCdq, i3_16, xx, no, x, END_LIST},/*XXX: imm = 8-imm*/
{OP_vsra_s8, 0xf2880110, "vsra.s8", VBq, xx, VCq, i3_16, xx, no, x, tsi5[0][0x03]},/*XXX: imm = 8-imm*/
{OP_vsra_s8, 0xf2880150, "vsra.s8", VBdq, xx, VCdq, i3_16, xx, no, x, END_LIST},/*XXX: imm = 8-imm*/
{OP_vrshr_s8, 0xf2880210, "vrshr.s8", VBq, xx, VCq, i3_16, xx, no, x, tsi5[0][0x05]},/*XXX: imm = 8-imm*/
{OP_vrshr_s8, 0xf2880250, "vrshr.s8", VBdq, xx, VCdq, i3_16, xx, no, x, END_LIST},/*XXX: imm = 8-imm*/
{OP_vrsra_s8, 0xf2880310, "vrsra.s8", VBq, xx, VCq, i3_16, xx, no, x, tsi5[0][0x07]},/*XXX: imm = 8-imm*/
{OP_vrsra_s8, 0xf2880350, "vrsra.s8", VBdq, xx, VCdq, i3_16, xx, no, x, END_LIST},/*XXX: imm = 8-imm*/
{INVALID, 0xf2880410, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2880450, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vshl_i8, 0xf2880510, "vshl.i8", VBq, xx, VCq, i3_16, xx, no, x, tsi5[0][0x0b]},/*XXX: imm = 8-imm?*/
{OP_vshl_i8, 0xf2880550, "vshl.i8", VBdq, xx, VCdq, i3_16, xx, no, x, END_LIST},/*XXX: imm = 8-imm?*/
{INVALID, 0xf2880610, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2880650, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{OP_vqshl_s8, 0xf2880710, "vqshl.s8", VBq, xx, VCq, i3_16, xx, no, x, tsi6[0][0x11]},/*XXX: imm = imm-8*/
{OP_vqshl_s8, 0xf2880750, "vqshl.s8", VBdq, xx, VCdq, i3_16, xx, no, x, tsi6[0][0x13]},/*XXX: imm = imm-8*/
{OP_vshrn_i8, 0xf2880810, "vshrn.i8", VBq, xx, VCdq, i3_16, xx, no, x, END_LIST},/*XXX: imm = 8-imm*/
{OP_vrshrn_i8, 0xf2880850, "vrshrn.i8", VBq, xx, VCdq, i3_16, xx, no, x, END_LIST},/*XXX: imm = 8-imm*/
{OP_vqshrn_s16, 0xf2880910, "vqshrn.s16", VBq, xx, VCdq, i3_16, xx, no, x, END_LIST},/*XXX: imm = 8-imm*/
{OP_vqrshrn_s16, 0xf2880950, "vqrshrn.s16", VBq, xx, VCdq, i3_16, xx, no, x, END_LIST},/*XXX: imm = 8-imm*/
{EXT_IMM1816, 0xf2880a10, "(ext imm1816 0)", xx, xx, xx, xx, xx, no, x, 0},
{INVALID, 0xf2880a50, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2880b10, "(bad)", xx, xx, xx, xx, xx, no, x, NA},
{INVALID, 0xf2880b50, "(bad)"