// Source file: src/simd/archsimd/internal/simd_test/binary_test.go

     1  // Copyright 2025 The Go Authors. All rights reserved.
     2  // Use of this source code is governed by a BSD-style
     3  // license that can be found in the LICENSE file.
     4  
     5  //go:build goexperiment.simd && amd64
     6  
     7  package simd_test
     8  
     9  import (
    10  	"simd/archsimd"
    11  	"testing"
    12  )
    13  
    14  func TestAdd(t *testing.T) {
    15  	testFloat32x4Binary(t, archsimd.Float32x4.Add, addSlice[float32])
    16  	testFloat32x8Binary(t, archsimd.Float32x8.Add, addSlice[float32])
    17  	testFloat64x2Binary(t, archsimd.Float64x2.Add, addSlice[float64])
    18  	testFloat64x4Binary(t, archsimd.Float64x4.Add, addSlice[float64])
    19  
    20  	testInt16x8Binary(t, archsimd.Int16x8.Add, addSlice[int16])
    21  	testInt32x4Binary(t, archsimd.Int32x4.Add, addSlice[int32])
    22  	testInt64x2Binary(t, archsimd.Int64x2.Add, addSlice[int64])
    23  	testInt8x16Binary(t, archsimd.Int8x16.Add, addSlice[int8])
    24  
    25  	testUint32x4Binary(t, archsimd.Uint32x4.Add, addSlice[uint32])
    26  	testUint64x2Binary(t, archsimd.Uint64x2.Add, addSlice[uint64])
    27  	testUint16x8Binary(t, archsimd.Uint16x8.Add, addSlice[uint16])
    28  	testUint8x16Binary(t, archsimd.Uint8x16.Add, addSlice[uint8])
    29  
    30  	if archsimd.X86.AVX2() {
    31  		testUint16x16Binary(t, archsimd.Uint16x16.Add, addSlice[uint16])
    32  		testUint32x8Binary(t, archsimd.Uint32x8.Add, addSlice[uint32])
    33  		testUint64x4Binary(t, archsimd.Uint64x4.Add, addSlice[uint64])
    34  		testUint8x32Binary(t, archsimd.Uint8x32.Add, addSlice[uint8])
    35  	}
    36  
    37  	if archsimd.X86.AVX2() {
    38  		testInt16x16Binary(t, archsimd.Int16x16.Add, addSlice[int16])
    39  		testInt32x8Binary(t, archsimd.Int32x8.Add, addSlice[int32])
    40  		testInt64x4Binary(t, archsimd.Int64x4.Add, addSlice[int64])
    41  		testInt8x32Binary(t, archsimd.Int8x32.Add, addSlice[int8])
    42  	}
    43  
    44  	if archsimd.X86.AVX512() {
    45  		testFloat32x16Binary(t, archsimd.Float32x16.Add, addSlice[float32])
    46  		testFloat64x8Binary(t, archsimd.Float64x8.Add, addSlice[float64])
    47  		testInt8x64Binary(t, archsimd.Int8x64.Add, addSlice[int8])
    48  		testInt16x32Binary(t, archsimd.Int16x32.Add, addSlice[int16])
    49  		testInt32x16Binary(t, archsimd.Int32x16.Add, addSlice[int32])
    50  		testInt64x8Binary(t, archsimd.Int64x8.Add, addSlice[int64])
    51  		testUint8x64Binary(t, archsimd.Uint8x64.Add, addSlice[uint8])
    52  		testUint16x32Binary(t, archsimd.Uint16x32.Add, addSlice[uint16])
    53  		testUint32x16Binary(t, archsimd.Uint32x16.Add, addSlice[uint32])
    54  		testUint64x8Binary(t, archsimd.Uint64x8.Add, addSlice[uint64])
    55  	}
    56  }
    57  
// TestSub checks element-wise vector subtraction for every supported
// element type and vector width against the scalar reference implementation
// subSlice. 256-bit integer shapes are gated on AVX2 and 512-bit shapes on
// AVX512; everything else runs unconditionally.
func TestSub(t *testing.T) {
	testFloat32x4Binary(t, archsimd.Float32x4.Sub, subSlice[float32])
	testFloat32x8Binary(t, archsimd.Float32x8.Sub, subSlice[float32])
	testFloat64x2Binary(t, archsimd.Float64x2.Sub, subSlice[float64])
	testFloat64x4Binary(t, archsimd.Float64x4.Sub, subSlice[float64])

	testInt32x4Binary(t, archsimd.Int32x4.Sub, subSlice[int32])
	testInt16x8Binary(t, archsimd.Int16x8.Sub, subSlice[int16])
	testInt64x2Binary(t, archsimd.Int64x2.Sub, subSlice[int64])
	testInt8x16Binary(t, archsimd.Int8x16.Sub, subSlice[int8])

	// 256-bit signed integer subtraction requires AVX2.
	if archsimd.X86.AVX2() {
		testInt16x16Binary(t, archsimd.Int16x16.Sub, subSlice[int16])
		testInt32x8Binary(t, archsimd.Int32x8.Sub, subSlice[int32])
		testInt64x4Binary(t, archsimd.Int64x4.Sub, subSlice[int64])
		testInt8x32Binary(t, archsimd.Int8x32.Sub, subSlice[int8])
	}

	testUint32x4Binary(t, archsimd.Uint32x4.Sub, subSlice[uint32])
	testUint16x8Binary(t, archsimd.Uint16x8.Sub, subSlice[uint16])
	testUint64x2Binary(t, archsimd.Uint64x2.Sub, subSlice[uint64])
	testUint8x16Binary(t, archsimd.Uint8x16.Sub, subSlice[uint8])

	// 256-bit unsigned integer subtraction also requires AVX2.
	if archsimd.X86.AVX2() {
		testUint16x16Binary(t, archsimd.Uint16x16.Sub, subSlice[uint16])
		testUint32x8Binary(t, archsimd.Uint32x8.Sub, subSlice[uint32])
		testUint64x4Binary(t, archsimd.Uint64x4.Sub, subSlice[uint64])
		testUint8x32Binary(t, archsimd.Uint8x32.Sub, subSlice[uint8])
	}

	// 512-bit shapes, float and integer alike, need AVX512.
	if archsimd.X86.AVX512() {
		testFloat32x16Binary(t, archsimd.Float32x16.Sub, subSlice[float32])
		testFloat64x8Binary(t, archsimd.Float64x8.Sub, subSlice[float64])
		testInt8x64Binary(t, archsimd.Int8x64.Sub, subSlice[int8])
		testInt16x32Binary(t, archsimd.Int16x32.Sub, subSlice[int16])
		testInt32x16Binary(t, archsimd.Int32x16.Sub, subSlice[int32])
		testInt64x8Binary(t, archsimd.Int64x8.Sub, subSlice[int64])
		testUint8x64Binary(t, archsimd.Uint8x64.Sub, subSlice[uint8])
		testUint16x32Binary(t, archsimd.Uint16x32.Sub, subSlice[uint16])
		testUint32x16Binary(t, archsimd.Uint32x16.Sub, subSlice[uint32])
		testUint64x8Binary(t, archsimd.Uint64x8.Sub, subSlice[uint64])
	}
}
   101  
// TestMax checks element-wise maximum for the supported integer element
// types and widths against the scalar reference implementation maxSlice.
// Float cases are disabled because the vector Max disagrees with the scalar
// reference when an input is NaN (see "nan is wrong" notes below).
// Note the 64-bit integer Max cases, even at 128/256-bit widths, are gated
// on AVX512 — presumably the instruction only exists there; TODO confirm.
func TestMax(t *testing.T) {
	// testFloat32x4Binary(t, archsimd.Float32x4.Max, maxSlice[float32]) // nan is wrong
	// testFloat32x8Binary(t, archsimd.Float32x8.Max, maxSlice[float32]) // nan is wrong
	// testFloat64x2Binary(t, archsimd.Float64x2.Max, maxSlice[float64]) // nan is wrong
	// testFloat64x4Binary(t, archsimd.Float64x4.Max, maxSlice[float64]) // nan is wrong

	testInt16x8Binary(t, archsimd.Int16x8.Max, maxSlice[int16])
	testInt32x4Binary(t, archsimd.Int32x4.Max, maxSlice[int32])

	if archsimd.X86.AVX2() {
		testInt16x16Binary(t, archsimd.Int16x16.Max, maxSlice[int16])
		testInt32x8Binary(t, archsimd.Int32x8.Max, maxSlice[int32])
	}

	// 64-bit signed Max requires AVX512 regardless of vector width.
	if archsimd.X86.AVX512() {
		testInt64x2Binary(t, archsimd.Int64x2.Max, maxSlice[int64])
		testInt64x4Binary(t, archsimd.Int64x4.Max, maxSlice[int64])
	}

	testInt8x16Binary(t, archsimd.Int8x16.Max, maxSlice[int8])

	if archsimd.X86.AVX2() {
		testInt8x32Binary(t, archsimd.Int8x32.Max, maxSlice[int8])
	}

	testUint16x8Binary(t, archsimd.Uint16x8.Max, maxSlice[uint16])
	testUint32x4Binary(t, archsimd.Uint32x4.Max, maxSlice[uint32])

	if archsimd.X86.AVX2() {
		testUint16x16Binary(t, archsimd.Uint16x16.Max, maxSlice[uint16])
		testUint32x8Binary(t, archsimd.Uint32x8.Max, maxSlice[uint32])
	}

	// 64-bit unsigned Max likewise requires AVX512.
	if archsimd.X86.AVX512() {
		testUint64x2Binary(t, archsimd.Uint64x2.Max, maxSlice[uint64])
		testUint64x4Binary(t, archsimd.Uint64x4.Max, maxSlice[uint64])
	}

	testUint8x16Binary(t, archsimd.Uint8x16.Max, maxSlice[uint8])

	if archsimd.X86.AVX2() {
		testUint8x32Binary(t, archsimd.Uint8x32.Max, maxSlice[uint8])
	}

	// 512-bit integer shapes need AVX512; 512-bit float Max is disabled
	// for the same NaN mismatch as the narrower float cases above.
	if archsimd.X86.AVX512() {
		// testFloat32x16Binary(t, archsimd.Float32x16.Max, maxSlice[float32]) // nan is wrong
		// testFloat64x8Binary(t, archsimd.Float64x8.Max, maxSlice[float64]) // nan is wrong
		testInt8x64Binary(t, archsimd.Int8x64.Max, maxSlice[int8])
		testInt16x32Binary(t, archsimd.Int16x32.Max, maxSlice[int16])
		testInt32x16Binary(t, archsimd.Int32x16.Max, maxSlice[int32])
		testInt64x8Binary(t, archsimd.Int64x8.Max, maxSlice[int64])
		testUint8x64Binary(t, archsimd.Uint8x64.Max, maxSlice[uint8])
		testUint16x32Binary(t, archsimd.Uint16x32.Max, maxSlice[uint16])
		testUint32x16Binary(t, archsimd.Uint32x16.Max, maxSlice[uint32])
		testUint64x8Binary(t, archsimd.Uint64x8.Max, maxSlice[uint64])
	}
}
   159  
// TestMin checks element-wise minimum for the supported integer element
// types and widths against the scalar reference implementation minSlice.
// It mirrors TestMax: float cases are disabled because the vector Min
// disagrees with the scalar reference on NaN inputs, and the 64-bit
// integer cases are gated on AVX512 even at 128/256-bit widths.
func TestMin(t *testing.T) {
	// testFloat32x4Binary(t, archsimd.Float32x4.Min, minSlice[float32]) // nan is wrong
	// testFloat32x8Binary(t, archsimd.Float32x8.Min, minSlice[float32]) // nan is wrong
	// testFloat64x2Binary(t, archsimd.Float64x2.Min, minSlice[float64]) // nan is wrong
	// testFloat64x4Binary(t, archsimd.Float64x4.Min, minSlice[float64]) // nan is wrong

	testInt16x8Binary(t, archsimd.Int16x8.Min, minSlice[int16])
	testInt32x4Binary(t, archsimd.Int32x4.Min, minSlice[int32])

	if archsimd.X86.AVX2() {
		testInt16x16Binary(t, archsimd.Int16x16.Min, minSlice[int16])
		testInt32x8Binary(t, archsimd.Int32x8.Min, minSlice[int32])
	}

	// 64-bit signed Min requires AVX512 regardless of vector width.
	if archsimd.X86.AVX512() {
		testInt64x2Binary(t, archsimd.Int64x2.Min, minSlice[int64])
		testInt64x4Binary(t, archsimd.Int64x4.Min, minSlice[int64])
	}

	testInt8x16Binary(t, archsimd.Int8x16.Min, minSlice[int8])

	if archsimd.X86.AVX2() {
		testInt8x32Binary(t, archsimd.Int8x32.Min, minSlice[int8])
	}

	testUint16x8Binary(t, archsimd.Uint16x8.Min, minSlice[uint16])
	testUint32x4Binary(t, archsimd.Uint32x4.Min, minSlice[uint32])

	if archsimd.X86.AVX2() {
		testUint16x16Binary(t, archsimd.Uint16x16.Min, minSlice[uint16])
		testUint32x8Binary(t, archsimd.Uint32x8.Min, minSlice[uint32])
	}

	// 64-bit unsigned Min likewise requires AVX512.
	if archsimd.X86.AVX512() {
		testUint64x2Binary(t, archsimd.Uint64x2.Min, minSlice[uint64])
		testUint64x4Binary(t, archsimd.Uint64x4.Min, minSlice[uint64])
	}

	testUint8x16Binary(t, archsimd.Uint8x16.Min, minSlice[uint8])

	if archsimd.X86.AVX2() {
		testUint8x32Binary(t, archsimd.Uint8x32.Min, minSlice[uint8])
	}

	// 512-bit integer shapes need AVX512; 512-bit float Min is disabled
	// for the same NaN mismatch as the narrower float cases above.
	if archsimd.X86.AVX512() {
		// testFloat32x16Binary(t, archsimd.Float32x16.Min, minSlice[float32]) // nan is wrong
		// testFloat64x8Binary(t, archsimd.Float64x8.Min, minSlice[float64]) // nan is wrong
		testInt8x64Binary(t, archsimd.Int8x64.Min, minSlice[int8])
		testInt16x32Binary(t, archsimd.Int16x32.Min, minSlice[int16])
		testInt32x16Binary(t, archsimd.Int32x16.Min, minSlice[int32])
		testInt64x8Binary(t, archsimd.Int64x8.Min, minSlice[int64])
		testUint8x64Binary(t, archsimd.Uint8x64.Min, minSlice[uint8])
		testUint16x32Binary(t, archsimd.Uint16x32.Min, minSlice[uint16])
		testUint32x16Binary(t, archsimd.Uint32x16.Min, minSlice[uint32])
		testUint64x8Binary(t, archsimd.Uint64x8.Min, minSlice[uint64])
	}
}
   217  
// TestAnd checks element-wise bitwise AND for the integer element types
// against the scalar reference implementation andSlice. 256-bit shapes
// are gated on AVX2 and 512-bit shapes on AVX512. The 8- and 16-bit
// 512-bit variants are commented out because the corresponding And
// methods are not yet provided ("missing").
func TestAnd(t *testing.T) {
	testInt16x8Binary(t, archsimd.Int16x8.And, andSlice[int16])
	testInt32x4Binary(t, archsimd.Int32x4.And, andSlice[int32])
	testInt64x2Binary(t, archsimd.Int64x2.And, andSlice[int64])
	testInt8x16Binary(t, archsimd.Int8x16.And, andSlice[int8])

	if archsimd.X86.AVX2() {
		testInt16x16Binary(t, archsimd.Int16x16.And, andSlice[int16])
		testInt32x8Binary(t, archsimd.Int32x8.And, andSlice[int32])
		testInt64x4Binary(t, archsimd.Int64x4.And, andSlice[int64])
		testInt8x32Binary(t, archsimd.Int8x32.And, andSlice[int8])
	}

	testUint16x8Binary(t, archsimd.Uint16x8.And, andSlice[uint16])
	testUint32x4Binary(t, archsimd.Uint32x4.And, andSlice[uint32])
	testUint64x2Binary(t, archsimd.Uint64x2.And, andSlice[uint64])
	testUint8x16Binary(t, archsimd.Uint8x16.And, andSlice[uint8])

	if archsimd.X86.AVX2() {
		testUint16x16Binary(t, archsimd.Uint16x16.And, andSlice[uint16])
		testUint32x8Binary(t, archsimd.Uint32x8.And, andSlice[uint32])
		testUint64x4Binary(t, archsimd.Uint64x4.And, andSlice[uint64])
		testUint8x32Binary(t, archsimd.Uint8x32.And, andSlice[uint8])
	}

	if archsimd.X86.AVX512() {
		//	testInt8x64Binary(t, archsimd.Int8x64.And, andSlice[int8]) // missing
		//	testInt16x32Binary(t, archsimd.Int16x32.And, andSlice[int16]) // missing
		testInt32x16Binary(t, archsimd.Int32x16.And, andSlice[int32])
		testInt64x8Binary(t, archsimd.Int64x8.And, andSlice[int64])
		//	testUint8x64Binary(t, archsimd.Uint8x64.And, andSlice[uint8]) // missing
		//	testUint16x32Binary(t, archsimd.Uint16x32.And, andSlice[uint16]) // missing
		testUint32x16Binary(t, archsimd.Uint32x16.And, andSlice[uint32])
		testUint64x8Binary(t, archsimd.Uint64x8.And, andSlice[uint64])
	}
}
   254  
// TestAndNot checks element-wise AND-NOT for the integer element types
// against the scalar reference implementation andNotSlice. Unlike And,
// Xor, and Or, the full set of 512-bit variants is available, so every
// type is exercised under the AVX512 guard. 256-bit shapes are gated
// on AVX2.
func TestAndNot(t *testing.T) {
	testInt16x8Binary(t, archsimd.Int16x8.AndNot, andNotSlice[int16])
	testInt32x4Binary(t, archsimd.Int32x4.AndNot, andNotSlice[int32])
	testInt64x2Binary(t, archsimd.Int64x2.AndNot, andNotSlice[int64])
	testInt8x16Binary(t, archsimd.Int8x16.AndNot, andNotSlice[int8])

	if archsimd.X86.AVX2() {
		testInt16x16Binary(t, archsimd.Int16x16.AndNot, andNotSlice[int16])
		testInt32x8Binary(t, archsimd.Int32x8.AndNot, andNotSlice[int32])
		testInt64x4Binary(t, archsimd.Int64x4.AndNot, andNotSlice[int64])
		testInt8x32Binary(t, archsimd.Int8x32.AndNot, andNotSlice[int8])
	}

	testUint8x16Binary(t, archsimd.Uint8x16.AndNot, andNotSlice[uint8])
	testUint16x8Binary(t, archsimd.Uint16x8.AndNot, andNotSlice[uint16])
	testUint32x4Binary(t, archsimd.Uint32x4.AndNot, andNotSlice[uint32])
	testUint64x2Binary(t, archsimd.Uint64x2.AndNot, andNotSlice[uint64])

	if archsimd.X86.AVX2() {
		testUint16x16Binary(t, archsimd.Uint16x16.AndNot, andNotSlice[uint16])
		testUint32x8Binary(t, archsimd.Uint32x8.AndNot, andNotSlice[uint32])
		testUint64x4Binary(t, archsimd.Uint64x4.AndNot, andNotSlice[uint64])
		testUint8x32Binary(t, archsimd.Uint8x32.AndNot, andNotSlice[uint8])
	}

	if archsimd.X86.AVX512() {
		testInt8x64Binary(t, archsimd.Int8x64.AndNot, andNotSlice[int8])
		testInt16x32Binary(t, archsimd.Int16x32.AndNot, andNotSlice[int16])
		testInt32x16Binary(t, archsimd.Int32x16.AndNot, andNotSlice[int32])
		testInt64x8Binary(t, archsimd.Int64x8.AndNot, andNotSlice[int64])
		testUint8x64Binary(t, archsimd.Uint8x64.AndNot, andNotSlice[uint8])
		testUint16x32Binary(t, archsimd.Uint16x32.AndNot, andNotSlice[uint16])
		testUint32x16Binary(t, archsimd.Uint32x16.AndNot, andNotSlice[uint32])
		testUint64x8Binary(t, archsimd.Uint64x8.AndNot, andNotSlice[uint64])
	}
}
   291  
// TestXor checks element-wise bitwise XOR for the integer element types
// against the scalar reference implementation xorSlice. 256-bit shapes
// are gated on AVX2 and 512-bit shapes on AVX512. The 8- and 16-bit
// 512-bit variants are commented out because the corresponding Xor
// methods are not yet provided ("missing"); the disabled lines originally
// referenced andISlice, an apparent copy-paste typo corrected to xorSlice.
func TestXor(t *testing.T) {
	testInt16x8Binary(t, archsimd.Int16x8.Xor, xorSlice[int16])
	testInt32x4Binary(t, archsimd.Int32x4.Xor, xorSlice[int32])
	testInt64x2Binary(t, archsimd.Int64x2.Xor, xorSlice[int64])
	testInt8x16Binary(t, archsimd.Int8x16.Xor, xorSlice[int8])

	if archsimd.X86.AVX2() {
		testInt16x16Binary(t, archsimd.Int16x16.Xor, xorSlice[int16])
		testInt32x8Binary(t, archsimd.Int32x8.Xor, xorSlice[int32])
		testInt64x4Binary(t, archsimd.Int64x4.Xor, xorSlice[int64])
		testInt8x32Binary(t, archsimd.Int8x32.Xor, xorSlice[int8])
	}

	testUint16x8Binary(t, archsimd.Uint16x8.Xor, xorSlice[uint16])
	testUint32x4Binary(t, archsimd.Uint32x4.Xor, xorSlice[uint32])
	testUint64x2Binary(t, archsimd.Uint64x2.Xor, xorSlice[uint64])
	testUint8x16Binary(t, archsimd.Uint8x16.Xor, xorSlice[uint8])

	if archsimd.X86.AVX2() {
		testUint16x16Binary(t, archsimd.Uint16x16.Xor, xorSlice[uint16])
		testUint32x8Binary(t, archsimd.Uint32x8.Xor, xorSlice[uint32])
		testUint64x4Binary(t, archsimd.Uint64x4.Xor, xorSlice[uint64])
		testUint8x32Binary(t, archsimd.Uint8x32.Xor, xorSlice[uint8])
	}

	if archsimd.X86.AVX512() {
		//	testInt8x64Binary(t, archsimd.Int8x64.Xor, xorSlice[int8]) // missing
		//	testInt16x32Binary(t, archsimd.Int16x32.Xor, xorSlice[int16]) // missing
		testInt32x16Binary(t, archsimd.Int32x16.Xor, xorSlice[int32])
		testInt64x8Binary(t, archsimd.Int64x8.Xor, xorSlice[int64])
		//	testUint8x64Binary(t, archsimd.Uint8x64.Xor, xorSlice[uint8]) // missing
		//	testUint16x32Binary(t, archsimd.Uint16x32.Xor, xorSlice[uint16]) // missing
		testUint32x16Binary(t, archsimd.Uint32x16.Xor, xorSlice[uint32])
		testUint64x8Binary(t, archsimd.Uint64x8.Xor, xorSlice[uint64])
	}
}
   328  
// TestOr checks element-wise bitwise OR for the integer element types
// against the scalar reference implementation orSlice. 256-bit shapes
// are gated on AVX2 and 512-bit shapes on AVX512. The 8- and 16-bit
// 512-bit variants are commented out because the corresponding Or
// methods are not yet provided ("missing"); the disabled lines originally
// referenced andISlice, an apparent copy-paste typo corrected to orSlice.
func TestOr(t *testing.T) {
	testInt16x8Binary(t, archsimd.Int16x8.Or, orSlice[int16])
	testInt32x4Binary(t, archsimd.Int32x4.Or, orSlice[int32])
	testInt64x2Binary(t, archsimd.Int64x2.Or, orSlice[int64])
	testInt8x16Binary(t, archsimd.Int8x16.Or, orSlice[int8])

	if archsimd.X86.AVX2() {
		testInt16x16Binary(t, archsimd.Int16x16.Or, orSlice[int16])
		testInt32x8Binary(t, archsimd.Int32x8.Or, orSlice[int32])
		testInt64x4Binary(t, archsimd.Int64x4.Or, orSlice[int64])
		testInt8x32Binary(t, archsimd.Int8x32.Or, orSlice[int8])
	}

	testUint16x8Binary(t, archsimd.Uint16x8.Or, orSlice[uint16])
	testUint32x4Binary(t, archsimd.Uint32x4.Or, orSlice[uint32])
	testUint64x2Binary(t, archsimd.Uint64x2.Or, orSlice[uint64])
	testUint8x16Binary(t, archsimd.Uint8x16.Or, orSlice[uint8])

	if archsimd.X86.AVX2() {
		testUint16x16Binary(t, archsimd.Uint16x16.Or, orSlice[uint16])
		testUint32x8Binary(t, archsimd.Uint32x8.Or, orSlice[uint32])
		testUint64x4Binary(t, archsimd.Uint64x4.Or, orSlice[uint64])
		testUint8x32Binary(t, archsimd.Uint8x32.Or, orSlice[uint8])
	}

	if archsimd.X86.AVX512() {
		//	testInt8x64Binary(t, archsimd.Int8x64.Or, orSlice[int8]) // missing
		//	testInt16x32Binary(t, archsimd.Int16x32.Or, orSlice[int16]) // missing
		testInt32x16Binary(t, archsimd.Int32x16.Or, orSlice[int32])
		testInt64x8Binary(t, archsimd.Int64x8.Or, orSlice[int64])
		//	testUint8x64Binary(t, archsimd.Uint8x64.Or, orSlice[uint8]) // missing
		//	testUint16x32Binary(t, archsimd.Uint16x32.Or, orSlice[uint16]) // missing
		testUint32x16Binary(t, archsimd.Uint32x16.Or, orSlice[uint32])
		testUint64x8Binary(t, archsimd.Uint64x8.Or, orSlice[uint64])
	}
}
   365  
// TestMul checks element-wise multiplication against the scalar reference
// implementation mulSlice. Coverage is intentionally partial: 8-bit Mul is
// not provided at any width ("nope"), unsigned Mul is disabled pending
// reuse of the signed implementation (see TODOs), and 64-bit integer Mul
// at any width is only available under AVX512.
func TestMul(t *testing.T) {
	testFloat32x4Binary(t, archsimd.Float32x4.Mul, mulSlice[float32])
	testFloat32x8Binary(t, archsimd.Float32x8.Mul, mulSlice[float32])
	testFloat64x2Binary(t, archsimd.Float64x2.Mul, mulSlice[float64])
	testFloat64x4Binary(t, archsimd.Float64x4.Mul, mulSlice[float64])

	testInt16x8Binary(t, archsimd.Int16x8.Mul, mulSlice[int16])
	testInt32x4Binary(t, archsimd.Int32x4.Mul, mulSlice[int32])

	if archsimd.X86.AVX2() {
		testInt16x16Binary(t, archsimd.Int16x16.Mul, mulSlice[int16])
		testInt32x8Binary(t, archsimd.Int32x8.Mul, mulSlice[int32])
	}

	// 8-bit element multiply is not available.
	// testInt8x16Binary(t, archsimd.Int8x16.Mul, mulSlice[int8]) // nope
	// testInt8x32Binary(t, archsimd.Int8x32.Mul, mulSlice[int8])

	// TODO we should be able to do these, there's no difference between signed/unsigned Mul
	// testUint16x16Binary(t, archsimd.Uint16x16.Mul, mulSlice[uint16])
	// testUint16x8Binary(t, archsimd.Uint16x8.Mul, mulSlice[uint16])
	// testUint32x4Binary(t, archsimd.Uint32x4.Mul, mulSlice[uint32])
	// testUint32x8Binary(t, archsimd.Uint32x8.Mul, mulSlice[uint32])
	// testUint64x2Binary(t, archsimd.Uint64x2.Mul, mulSlice[uint64])
	// testUint64x4Binary(t, archsimd.Uint64x4.Mul, mulSlice[uint64])

	// testUint8x16Binary(t, archsimd.Uint8x16.Mul, mulSlice[uint8]) // nope
	// testUint8x32Binary(t, archsimd.Uint8x32.Mul, mulSlice[uint8])

	if archsimd.X86.AVX512() {
		// 64-bit integer multiply exists only on AVX512, so even the
		// 128/256-bit shapes live under this guard.
		testInt64x2Binary(t, archsimd.Int64x2.Mul, mulSlice[int64]) // avx512 only
		testInt64x4Binary(t, archsimd.Int64x4.Mul, mulSlice[int64])

		testFloat32x16Binary(t, archsimd.Float32x16.Mul, mulSlice[float32])
		testFloat64x8Binary(t, archsimd.Float64x8.Mul, mulSlice[float64])

		// testInt8x64Binary(t, archsimd.Int8x64.Mul, mulSlice[int8]) // nope
		testInt16x32Binary(t, archsimd.Int16x32.Mul, mulSlice[int16])
		testInt32x16Binary(t, archsimd.Int32x16.Mul, mulSlice[int32])
		testInt64x8Binary(t, archsimd.Int64x8.Mul, mulSlice[int64])
		// testUint8x64Binary(t, archsimd.Uint8x64.Mul, mulSlice[uint8]) // nope

		// TODO signed should do the job
		// testUint16x32Binary(t, archsimd.Uint16x32.Mul, mulSlice[uint16])
		// testUint32x16Binary(t, archsimd.Uint32x16.Mul, mulSlice[uint32])
		// testUint64x8Binary(t, archsimd.Uint64x8.Mul, mulSlice[uint64])
	}
}
   413  
// TestDiv checks element-wise division against the scalar reference
// implementation divSlice. Division is float-only; 128/256-bit shapes
// run unconditionally and 512-bit shapes are gated on AVX512.
func TestDiv(t *testing.T) {
	testFloat32x4Binary(t, archsimd.Float32x4.Div, divSlice[float32])
	testFloat32x8Binary(t, archsimd.Float32x8.Div, divSlice[float32])
	testFloat64x2Binary(t, archsimd.Float64x2.Div, divSlice[float64])
	testFloat64x4Binary(t, archsimd.Float64x4.Div, divSlice[float64])

	if archsimd.X86.AVX512() {
		testFloat32x16Binary(t, archsimd.Float32x16.Div, divSlice[float32])
		testFloat64x8Binary(t, archsimd.Float64x8.Div, divSlice[float64])
	}
}
   425  