@@ -681,194 +681,209 @@ enum ASTNodes {
681681 // SIMD opcodes
682682
683683 V128Load = 0x00 ,
684- V128Store = 0x01 ,
685- V128Const = 0x02 ,
686- V8x16Shuffle = 0x03 ,
687- I8x16Splat = 0x04 ,
688- I8x16ExtractLaneS = 0x05 ,
689- I8x16ExtractLaneU = 0x06 ,
690- I8x16ReplaceLane = 0x07 ,
691- I16x8Splat = 0x08 ,
692- I16x8ExtractLaneS = 0x09 ,
693- I16x8ExtractLaneU = 0x0a ,
694- I16x8ReplaceLane = 0x0b ,
695- I32x4Splat = 0x0c ,
696- I32x4ExtractLane = 0x0d ,
697- I32x4ReplaceLane = 0x0e ,
698- I64x2Splat = 0x0f ,
699- I64x2ExtractLane = 0x10 ,
700- I64x2ReplaceLane = 0x11 ,
701- F32x4Splat = 0x12 ,
702- F32x4ExtractLane = 0x13 ,
703- F32x4ReplaceLane = 0x14 ,
704- F64x2Splat = 0x15 ,
705- F64x2ExtractLane = 0x16 ,
706- F64x2ReplaceLane = 0x17 ,
707- I8x16Eq = 0x18 ,
708- I8x16Ne = 0x19 ,
709- I8x16LtS = 0x1a ,
710- I8x16LtU = 0x1b ,
711- I8x16GtS = 0x1c ,
712- I8x16GtU = 0x1d ,
713- I8x16LeS = 0x1e ,
714- I8x16LeU = 0x1f ,
715- I8x16GeS = 0x20 ,
716- I8x16GeU = 0x21 ,
717- I16x8Eq = 0x22 ,
718- I16x8Ne = 0x23 ,
719- I16x8LtS = 0x24 ,
720- I16x8LtU = 0x25 ,
721- I16x8GtS = 0x26 ,
722- I16x8GtU = 0x27 ,
723- I16x8LeS = 0x28 ,
724- I16x8LeU = 0x29 ,
725- I16x8GeS = 0x2a ,
726- I16x8GeU = 0x2b ,
727- I32x4Eq = 0x2c ,
728- I32x4Ne = 0x2d ,
729- I32x4LtS = 0x2e ,
730- I32x4LtU = 0x2f ,
731- I32x4GtS = 0x30 ,
732- I32x4GtU = 0x31 ,
733- I32x4LeS = 0x32 ,
734- I32x4LeU = 0x33 ,
735- I32x4GeS = 0x34 ,
736- I32x4GeU = 0x35 ,
737- F32x4Eq = 0x40 ,
738- F32x4Ne = 0x41 ,
739- F32x4Lt = 0x42 ,
740- F32x4Gt = 0x43 ,
741- F32x4Le = 0x44 ,
742- F32x4Ge = 0x45 ,
743- F64x2Eq = 0x46 ,
744- F64x2Ne = 0x47 ,
745- F64x2Lt = 0x48 ,
746- F64x2Gt = 0x49 ,
747- F64x2Le = 0x4a ,
748- F64x2Ge = 0x4b ,
749- V128Not = 0x4c ,
750- V128And = 0x4d ,
751- V128Or = 0x4e ,
752- V128Xor = 0x4f ,
753- V128AndNot = 0xd8 ,
754- V128Bitselect = 0x50 ,
755- I8x16Abs = 0xe1 ,
756- I8x16Neg = 0x51 ,
757- I8x16AnyTrue = 0x52 ,
758- I8x16AllTrue = 0x53 ,
759- I8x16Bitmask = 0xe4 ,
760- I8x16Shl = 0x54 ,
761- I8x16ShrS = 0x55 ,
762- I8x16ShrU = 0x56 ,
763- I8x16Add = 0x57 ,
764- I8x16AddSatS = 0x58 ,
765- I8x16AddSatU = 0x59 ,
766- I8x16Sub = 0x5a ,
767- I8x16SubSatS = 0x5b ,
768- I8x16SubSatU = 0x5c ,
769- I8x16Mul = 0x5d ,
770- I8x16MinS = 0x5e ,
771- I8x16MinU = 0x5f ,
772- I8x16MaxS = 0x60 ,
773- I8x16MaxU = 0x61 ,
774- I8x16AvgrU = 0xd9 ,
775- I16x8Abs = 0xe2 ,
776- I16x8Neg = 0x62 ,
777- I16x8AnyTrue = 0x63 ,
778- I16x8AllTrue = 0x64 ,
779- I16x8Bitmask = 0xe5 ,
780- I16x8Shl = 0x65 ,
781- I16x8ShrS = 0x66 ,
782- I16x8ShrU = 0x67 ,
783- I16x8Add = 0x68 ,
784- I16x8AddSatS = 0x69 ,
785- I16x8AddSatU = 0x6a ,
786- I16x8Sub = 0x6b ,
787- I16x8SubSatS = 0x6c ,
788- I16x8SubSatU = 0x6d ,
789- I16x8Mul = 0x6e ,
790- I16x8MinS = 0x6f ,
791- I16x8MinU = 0x70 ,
792- I16x8MaxS = 0x71 ,
793- I16x8MaxU = 0x72 ,
794- I16x8AvgrU = 0xda ,
795- I32x4Abs = 0xe3 ,
796- I32x4Neg = 0x73 ,
797- I32x4AnyTrue = 0x74 ,
798- I32x4AllTrue = 0x75 ,
799- I32x4Bitmask = 0xe6 ,
800- I32x4Shl = 0x76 ,
801- I32x4ShrS = 0x77 ,
802- I32x4ShrU = 0x78 ,
803- I32x4Add = 0x79 ,
804- I32x4Sub = 0x7c ,
805- I32x4Mul = 0x7f ,
806- I32x4MinS = 0x80 ,
807- I32x4MinU = 0x81 ,
808- I32x4MaxS = 0x82 ,
809- I32x4MaxU = 0x83 ,
810- I32x4DotSVecI16x8 = 0xdb ,
811- I64x2Neg = 0x84 ,
812- I64x2AnyTrue = 0x85 ,
813- I64x2AllTrue = 0x86 ,
814- I64x2Shl = 0x87 ,
815- I64x2ShrS = 0x88 ,
816- I64x2ShrU = 0x89 ,
817- I64x2Add = 0x8a ,
818- I64x2Sub = 0x8d ,
819- F32x4Abs = 0x95 ,
820- F32x4Neg = 0x96 ,
821- F32x4Sqrt = 0x97 ,
822- F32x4QFMA = 0x98 ,
823- F32x4QFMS = 0x99 ,
824- F32x4Add = 0x9a ,
825- F32x4Sub = 0x9b ,
826- F32x4Mul = 0x9c ,
827- F32x4Div = 0x9d ,
828- F32x4Min = 0x9e ,
829- F32x4Max = 0x9f ,
830- F64x2Abs = 0xa0 ,
831- F64x2Neg = 0xa1 ,
832- F64x2Sqrt = 0xa2 ,
833- F64x2QFMA = 0xa3 ,
834- F64x2QFMS = 0xa4 ,
835- F64x2Add = 0xa5 ,
836- F64x2Sub = 0xa6 ,
837- F64x2Mul = 0xa7 ,
838- F64x2Div = 0xa8 ,
839- F64x2Min = 0xa9 ,
840- F64x2Max = 0xaa ,
841- I32x4TruncSatSF32x4 = 0xab ,
842- I32x4TruncSatUF32x4 = 0xac ,
843- I64x2TruncSatSF64x2 = 0xad ,
844- I64x2TruncSatUF64x2 = 0xae ,
845- F32x4ConvertSI32x4 = 0xaf ,
846- F32x4ConvertUI32x4 = 0xb0 ,
847- F64x2ConvertSI64x2 = 0xb1 ,
848- F64x2ConvertUI64x2 = 0xb2 ,
849- V8x16LoadSplat = 0xc2 ,
850- V16x8LoadSplat = 0xc3 ,
851- V32x4LoadSplat = 0xc4 ,
852- V64x2LoadSplat = 0xc5 ,
853- I8x16NarrowSI16x8 = 0xc6 ,
854- I8x16NarrowUI16x8 = 0xc7 ,
855- I16x8NarrowSI32x4 = 0xc8 ,
856- I16x8NarrowUI32x4 = 0xc9 ,
857- I16x8WidenLowSI8x16 = 0xca ,
858- I16x8WidenHighSI8x16 = 0xcb ,
859- I16x8WidenLowUI8x16 = 0xcc ,
860- I16x8WidenHighUI8x16 = 0xcd ,
861- I32x4WidenLowSI16x8 = 0xce ,
862- I32x4WidenHighSI16x8 = 0xcf ,
863- I32x4WidenLowUI16x8 = 0xd0 ,
864- I32x4WidenHighUI16x8 = 0xd1 ,
865- I16x8LoadExtSVec8x8 = 0xd2 ,
866- I16x8LoadExtUVec8x8 = 0xd3 ,
867- I32x4LoadExtSVec16x4 = 0xd4 ,
868- I32x4LoadExtUVec16x4 = 0xd5 ,
869- I64x2LoadExtSVec32x2 = 0xd6 ,
870- I64x2LoadExtUVec32x2 = 0xd7 ,
871- V8x16Swizzle = 0xc0 ,
684+ I16x8LoadExtSVec8x8 = 0x01 ,
685+ I16x8LoadExtUVec8x8 = 0x02 ,
686+ I32x4LoadExtSVec16x4 = 0x03 ,
687+ I32x4LoadExtUVec16x4 = 0x04 ,
688+ I64x2LoadExtSVec32x2 = 0x05 ,
689+ I64x2LoadExtUVec32x2 = 0x06 ,
690+ V8x16LoadSplat = 0x07 ,
691+ V16x8LoadSplat = 0x08 ,
692+ V32x4LoadSplat = 0x09 ,
693+ V64x2LoadSplat = 0x0a ,
694+ V128Store = 0x0b ,
695+
696+ V128Const = 0x0c ,
697+ V8x16Shuffle = 0x0d ,
698+ V8x16Swizzle = 0x0e ,
699+
700+ I8x16Splat = 0x0f ,
701+ I16x8Splat = 0x10 ,
702+ I32x4Splat = 0x11 ,
703+ I64x2Splat = 0x12 ,
704+ F32x4Splat = 0x13 ,
705+ F64x2Splat = 0x14 ,
706+
707+ I8x16ExtractLaneS = 0x15 ,
708+ I8x16ExtractLaneU = 0x16 ,
709+ I8x16ReplaceLane = 0x17 ,
710+ I16x8ExtractLaneS = 0x18 ,
711+ I16x8ExtractLaneU = 0x19 ,
712+ I16x8ReplaceLane = 0x1a ,
713+ I32x4ExtractLane = 0x1b ,
714+ I32x4ReplaceLane = 0x1c ,
715+ I64x2ExtractLane = 0x1d ,
716+ I64x2ReplaceLane = 0x1e ,
717+ F32x4ExtractLane = 0x1f ,
718+ F32x4ReplaceLane = 0x20 ,
719+ F64x2ExtractLane = 0x21 ,
720+ F64x2ReplaceLane = 0x22 ,
721+
722+ I8x16Eq = 0x23 ,
723+ I8x16Ne = 0x24 ,
724+ I8x16LtS = 0x25 ,
725+ I8x16LtU = 0x26 ,
726+ I8x16GtS = 0x27 ,
727+ I8x16GtU = 0x28 ,
728+ I8x16LeS = 0x29 ,
729+ I8x16LeU = 0x2a ,
730+ I8x16GeS = 0x2b ,
731+ I8x16GeU = 0x2c ,
732+ I16x8Eq = 0x2d ,
733+ I16x8Ne = 0x2e ,
734+ I16x8LtS = 0x2f ,
735+ I16x8LtU = 0x30 ,
736+ I16x8GtS = 0x31 ,
737+ I16x8GtU = 0x32 ,
738+ I16x8LeS = 0x33 ,
739+ I16x8LeU = 0x34 ,
740+ I16x8GeS = 0x35 ,
741+ I16x8GeU = 0x36 ,
742+ I32x4Eq = 0x37 ,
743+ I32x4Ne = 0x38 ,
744+ I32x4LtS = 0x39 ,
745+ I32x4LtU = 0x3a ,
746+ I32x4GtS = 0x3b ,
747+ I32x4GtU = 0x3c ,
748+ I32x4LeS = 0x3d ,
749+ I32x4LeU = 0x3e ,
750+ I32x4GeS = 0x3f ,
751+ I32x4GeU = 0x40 ,
752+ F32x4Eq = 0x41 ,
753+ F32x4Ne = 0x42 ,
754+ F32x4Lt = 0x43 ,
755+ F32x4Gt = 0x44 ,
756+ F32x4Le = 0x45 ,
757+ F32x4Ge = 0x46 ,
758+ F64x2Eq = 0x47 ,
759+ F64x2Ne = 0x48 ,
760+ F64x2Lt = 0x49 ,
761+ F64x2Gt = 0x4a ,
762+ F64x2Le = 0x4b ,
763+ F64x2Ge = 0x4c ,
764+
765+ V128Not = 0x4d ,
766+ V128And = 0x4e ,
767+ V128AndNot = 0x4f ,
768+ V128Or = 0x50 ,
769+ V128Xor = 0x51 ,
770+ V128Bitselect = 0x52 ,
771+
772+ I8x16Abs = 0x60 ,
773+ I8x16Neg = 0x61 ,
774+ I8x16AnyTrue = 0x62 ,
775+ I8x16AllTrue = 0x63 ,
776+ I8x16Bitmask = 0x64 ,
777+ I8x16NarrowSI16x8 = 0x65 ,
778+ I8x16NarrowUI16x8 = 0x66 ,
779+ I8x16Shl = 0x6b ,
780+ I8x16ShrS = 0x6c ,
781+ I8x16ShrU = 0x6d ,
782+ I8x16Add = 0x6e ,
783+ I8x16AddSatS = 0x6f ,
784+ I8x16AddSatU = 0x70 ,
785+ I8x16Sub = 0x71 ,
786+ I8x16SubSatS = 0x72 ,
787+ I8x16SubSatU = 0x73 ,
788+ I8x16Mul = 0x75 ,
789+ I8x16MinS = 0x76 ,
790+ I8x16MinU = 0x77 ,
791+ I8x16MaxS = 0x78 ,
792+ I8x16MaxU = 0x79 ,
793+ I8x16AvgrU = 0x7b ,
794+
795+ I16x8Abs = 0x80 ,
796+ I16x8Neg = 0x81 ,
797+ I16x8AnyTrue = 0x82 ,
798+ I16x8AllTrue = 0x83 ,
799+ I16x8Bitmask = 0x84 ,
800+ I16x8NarrowSI32x4 = 0x85 ,
801+ I16x8NarrowUI32x4 = 0x86 ,
802+ I16x8WidenLowSI8x16 = 0x87 ,
803+ I16x8WidenHighSI8x16 = 0x88 ,
804+ I16x8WidenLowUI8x16 = 0x89 ,
805+ I16x8WidenHighUI8x16 = 0x8a ,
806+ I16x8Shl = 0x8b ,
807+ I16x8ShrS = 0x8c ,
808+ I16x8ShrU = 0x8d ,
809+ I16x8Add = 0x8e ,
810+ I16x8AddSatS = 0x8f ,
811+ I16x8AddSatU = 0x90 ,
812+ I16x8Sub = 0x91 ,
813+ I16x8SubSatS = 0x92 ,
814+ I16x8SubSatU = 0x93 ,
815+ I16x8Mul = 0x95 ,
816+ I16x8MinS = 0x96 ,
817+ I16x8MinU = 0x97 ,
818+ I16x8MaxS = 0x98 ,
819+ I16x8MaxU = 0x99 ,
820+ I16x8AvgrU = 0x9b ,
821+
822+ I32x4Abs = 0xa0 ,
823+ I32x4Neg = 0xa1 ,
824+ I32x4AnyTrue = 0xa2 ,
825+ I32x4AllTrue = 0xa3 ,
826+ I32x4Bitmask = 0xa4 ,
827+ I32x4WidenLowSI16x8 = 0xa7 ,
828+ I32x4WidenHighSI16x8 = 0xa8 ,
829+ I32x4WidenLowUI16x8 = 0xa9 ,
830+ I32x4WidenHighUI16x8 = 0xaa ,
831+ I32x4Shl = 0xab ,
832+ I32x4ShrS = 0xac ,
833+ I32x4ShrU = 0xad ,
834+ I32x4Add = 0xae ,
835+ I32x4Sub = 0xb1 ,
836+ I32x4DotSVecI16x8 = 0xb4 ,
837+ I32x4Mul = 0xb5 ,
838+ I32x4MinS = 0xb6 ,
839+ I32x4MinU = 0xb7 ,
840+ I32x4MaxS = 0xb8 ,
841+ I32x4MaxU = 0xb9 ,
842+
843+ I64x2Neg = 0xc1 ,
844+ I64x2AnyTrue = 0xc2 ,
845+ I64x2AllTrue = 0xc3 ,
846+ I64x2Shl = 0xcb ,
847+ I64x2ShrS = 0xcc ,
848+ I64x2ShrU = 0xcd ,
849+ I64x2Add = 0xce ,
850+ I64x2Sub = 0xd1 ,
851+ // TODO: i64x2.mul
852+
853+ F32x4Abs = 0xe0 ,
854+ F32x4Neg = 0xe1 ,
855+ F32x4Sqrt = 0xe3 ,
856+ F32x4Add = 0xe4 ,
857+ F32x4Sub = 0xe5 ,
858+ F32x4Mul = 0xe6 ,
859+ F32x4Div = 0xe7 ,
860+ F32x4Min = 0xe8 ,
861+ F32x4Max = 0xe9 ,
862+
863+ F64x2Abs = 0xec ,
864+ F64x2Neg = 0xed ,
865+ F64x2Sqrt = 0xef ,
866+ F64x2Add = 0xf0 ,
867+ F64x2Sub = 0xf1 ,
868+ F64x2Mul = 0xf2 ,
869+ F64x2Div = 0xf3 ,
870+ F64x2Min = 0xf4 ,
871+ F64x2Max = 0xf5 ,
872+
873+ I32x4TruncSatSF32x4 = 0xf8 ,
874+ I32x4TruncSatUF32x4 = 0xf9 ,
875+ F32x4ConvertSI32x4 = 0xfa ,
876+ F32x4ConvertUI32x4 = 0xfb ,
877+
878+ F32x4QFMA = 0xfc ,
879+ F32x4QFMS = 0xfd ,
880+ F64x2QFMA = 0xfe ,
881+ F64x2QFMS = 0xff ,
882+
883+ I64x2TruncSatSF64x2 = 0x0100 ,
884+ I64x2TruncSatUF64x2 = 0x0101 ,
885+ F64x2ConvertSI64x2 = 0x0102 ,
886+ F64x2ConvertUI64x2 = 0x0103 ,
872887
873888 // bulk memory opcodes
874889