Wwise SDK 2019.2.15
AkSimdShuffle.h
1 /*******************************************************************************
2 The content of this file includes portions of the AUDIOKINETIC Wwise Technology
3 released in source code form as part of the SDK installer package.
4 
5 Commercial License Usage
6 
7 Licensees holding valid commercial licenses to the AUDIOKINETIC Wwise Technology
8 may use this file in accordance with the end user license agreement provided
9 with the software or, alternatively, in accordance with the terms contained in a
10 written agreement between you and Audiokinetic Inc.
11 
12 Apache License Usage
13 
14 Alternatively, this file may be used under the Apache License, Version 2.0 (the
15 "Apache License"); you may not use this file except in compliance with the
16 Apache License. You may obtain a copy of the Apache License at
17 http://www.apache.org/licenses/LICENSE-2.0.
18 
19 Unless required by applicable law or agreed to in writing, software distributed
20 under the Apache License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
21 OR CONDITIONS OF ANY KIND, either express or implied. See the Apache License for
22 the specific language governing permissions and limitations under the License.
23 
24  Version: <VERSION> Build: <BUILDNUMBER>
25  Copyright (c) <COPYRIGHTYEAR> Audiokinetic Inc.
26 *******************************************************************************/
27 
28 /// \file
29 /// _AKSIMD_LOCAL::SHUFFLE_V4F32<zyxw>(a, b) - arm_neon implementation
30 
31 #ifndef __AK_SIMD_SHUFFLE_H__
32 #define __AK_SIMD_SHUFFLE_H__
33 
34 namespace _AKSIMD_LOCAL
35 {
36  // Same as _mm_shuffle_ps( a, b, _MM_SHUFFLE( z, y, x, w ) ). There is no default implementation
37  // of this template function. Every required combination of zyxw needs to be explicitly implemented
38  // below.
39  template< int zyxw > AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32( const AKSIMD_V4F32& a, const AKSIMD_V4F32& b );
40 
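 // --- Usage sketch (added for illustration; not part of the shipped header) ---
 // The selector is built with AKSIMD_SHUFFLE exactly like _MM_SHUFFLE: given
 // v1 = { x, y, z, w } and v2 = { a, b, c, d }, SHUFFLE_V4F32<AKSIMD_SHUFFLE(z, y, x, w)>
 // picks two lanes of v1 for the low half of the result and two lanes of v2 for the
 // high half. For example, assuming v1 and v2 are AKSIMD_V4F32 values:
 //
 //   AKSIMD_V4F32 xzac = _AKSIMD_LOCAL::SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 2, 0)>( v1, v2 );
 //   // xzac == { x, z, a, c }, matching _mm_shuffle_ps( v1, v2, _MM_SHUFFLE(2, 0, 2, 0) ) on SSE.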
41  /* 0 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
42  {
43  //return akshuffle_xxaa( xyzw, abcd );
44  AKSIMD_V2F32 xx = vdup_lane_f32( vget_low_f32( xyzw ), 0 );
45  AKSIMD_V2F32 aa = vdup_lane_f32( vget_low_f32( abcd ), 0 );
46  AKSIMD_V4F32 xxaa = vcombine_f32( xx, aa );
47  return xxaa;
48  }
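 // (Added note) The implemented specializations above and below generally follow the same NEON
 // idiom: vget_low_f32 / vget_high_f32 split each 128-bit vector into 64-bit halves, lane
 // operations such as vdup_lane_f32, vrev64_f32, vtrn_f32 and vext_f32 rearrange those halves,
 // and vcombine_f32 reassembles the final { low-from-xyzw, high-from-abcd } result.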
49  /* 1 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxaa( xyzw, abcd ); }
50  /* 2 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxaa( xyzw, abcd ); }
51  /* 3 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxaa( xyzw, abcd ); }
52  /* 4 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xyaa( xyzw, abcd ); }
53  /* 5 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyaa( xyzw, abcd ); }
54  /* 6 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zyaa( xyzw, abcd ); }
55  /* 7 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyaa( xyzw, abcd ); }
56  /* 8 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzaa( xyzw, abcd ); }
57  /* 9 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzaa( xyzw, abcd ); }
58  /* 10 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzaa( xyzw, abcd ); }
59  /* 11 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzaa( xyzw, abcd ); }
60  /* 12 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwaa( xyzw, abcd ); }
61  /* 13 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywaa( xyzw, abcd ); }
62  /* 14 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwaa( xyzw, abcd ); }
63  /* 15 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 0, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
64  {
65  //return akshuffle_wwaa( xyzw, abcd );
66  AKSIMD_V2F32 ww = vdup_lane_f32( vget_high_f32( xyzw ), 1 );
67  AKSIMD_V2F32 aa = vdup_lane_f32( vget_low_f32( abcd ), 0 );
68  AKSIMD_V4F32 wwaa = vcombine_f32( ww, aa );
69  return wwaa;
70 
71  }
72  /* 16 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxba( xyzw, abcd ); }
73  /* 17 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
74  {
75  //return akshuffle_yxba( xyzw, abcd );
76  AKSIMD_V2F32 yx = vrev64_f32( vget_low_f32( xyzw ) );
77  AKSIMD_V2F32 ba = vrev64_f32( vget_low_f32( abcd ) );
78  AKSIMD_V4F32 yxba = vcombine_f32( yx , ba );
79  return yxba;
80  }
81  /* 18 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxba( xyzw, abcd ); }
82  /* 19 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxba( xyzw, abcd ); }
83  /* 20 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xyba( xyzw, abcd ); }
84  /* 21 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyba( xyzw, abcd ); }
85  /* 22 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zyba( xyzw, abcd ); }
86  /* 23 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyba( xyzw, abcd ); }
87  /* 24 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzba( xyzw, abcd ); }
88  /* 25 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzba( xyzw, abcd ); }
89  /* 26 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzba( xyzw, abcd ); }
90  /* 27 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzba( xyzw, abcd ); }
91  /* 28 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwba( xyzw, abcd ); }
92  /* 29 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywba( xyzw, abcd ); }
93  /* 30 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
94  {
95  //return akshuffle_zwba( xyzw, abcd );
96  AKSIMD_V2F32 zw = vget_high_f32( xyzw );
97  AKSIMD_V2F32 ba = vrev64_f32( vget_low_f32( abcd ) );
98  AKSIMD_V4F32 zwba = vcombine_f32( zw , ba );
99  return zwba;
100  }
101  /* 31 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 1, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwba( xyzw, abcd ); }
102  /* 32 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxca( xyzw, abcd ); }
103  /* 33 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxca( xyzw, abcd ); }
104  /* 34 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxca( xyzw, abcd ); }
105  /* 35 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxca( xyzw, abcd ); }
106  /* 36 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xyca( xyzw, abcd ); }
107  /* 37 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyca( xyzw, abcd ); }
108  /* 38 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zyca( xyzw, abcd ); }
109  /* 39 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyca( xyzw, abcd ); }
110  /* 40 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzca( xyzw, abcd ); }
111  /* 41 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzca( xyzw, abcd ); }
112  /* 42 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzca( xyzw, abcd ); }
113  /* 43 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzca( xyzw, abcd ); }
114  /* 44 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwca( xyzw, abcd ); }
115  /* 45 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywca( xyzw, abcd ); }
116  /* 46 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwca( xyzw, abcd ); }
117  /* 47 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 2, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwca( xyzw, abcd ); }
118  /* 48 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxda( xyzw, abcd ); }
119  /* 49 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxda( xyzw, abcd ); }
120  /* 50 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxda( xyzw, abcd ); }
121  /* 51 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxda( xyzw, abcd ); }
122  /* 52 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xyda( xyzw, abcd ); }
123  /* 53 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyda( xyzw, abcd ); }
124  /* 54 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
125  {
126  //return akshuffle_zyda( xyzw, abcd );
127  AKSIMD_V4F32 bcda = vextq_f32( abcd, abcd, 1 );
128  AKSIMD_V4F32 yzwx = vextq_f32( xyzw, xyzw, 1 );
129  AKSIMD_V4F32 zyda = vcombine_f32( vrev64_f32( vget_low_f32( yzwx ) ), vget_high_f32( bcda ) );
130  return zyda;
131  }
132  /* 55 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyda( xyzw, abcd ); }
133  /* 56 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzda( xyzw, abcd ); }
134  /* 57 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 2, 1)>( const AKSIMD_V4F32& a, const AKSIMD_V4F32& b )
135  {
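 // (Added note) Equivalent to the akshuffle_yzda pattern: returns { y, z, d, a }, i.e. lanes 1
 // and 2 of the first vector followed by lanes 3 and 0 of the second.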
136  float32x2_t a21 = vget_high_f32(vextq_f32(a, a, 3));
137  float32x2_t b03 = vget_low_f32(vextq_f32(b, b, 3));
138  return vcombine_f32(a21, b03);
139  }
140  /* 58 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzda( xyzw, abcd ); }
141  /* 59 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzda( xyzw, abcd ); }
142  /* 60 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwda( xyzw, abcd ); }
143  /* 61 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywda( xyzw, abcd ); }
144  /* 62 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwda( xyzw, abcd ); }
145  /* 63 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(0, 3, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwda( xyzw, abcd ); }
146  /* 64 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxab( xyzw, abcd ); }
147  /* 65 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxab( xyzw, abcd ); }
148  /* 66 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxab( xyzw, abcd ); }
149  /* 67 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxab( xyzw, abcd ); }
150  /* 68 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
151  {
152  //return akshuffle_xyab( xyzw, abcd );
153  AKSIMD_V4F32 xyab = vcombine_f32( vget_low_f32( xyzw ) , vget_low_f32( abcd ) );
154  return xyab;
155  }
156  /* 69 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyab( xyzw, abcd ); }
157  /* 70 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zyab( xyzw, abcd ); }
158  /* 71 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyab( xyzw, abcd ); }
159  /* 72 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzab( xyzw, abcd ); }
160  /* 73 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
161  {
162  //return akshuffle_yzab( xyzw, abcd );
163  AKSIMD_V4F32 yzwx = vextq_f32( xyzw, xyzw, 1 );
164  AKSIMD_V2F32 yz = vget_low_f32( yzwx );
165  AKSIMD_V2F32 ab = vget_low_f32( abcd );
166  AKSIMD_V4F32 yzab = vcombine_f32( yz, ab );
167  return yzab;
168  }
169  /* 74 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzab( xyzw, abcd ); }
170  /* 75 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzab( xyzw, abcd ); }
171  /* 76 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwab( xyzw, abcd ); }
172  /* 77 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywab( xyzw, abcd ); }
173  /* 78 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
174  {
175  //return akshuffle_zwab( xyzw, abcd );
176  AKSIMD_V2F32 zw = vget_high_f32( xyzw );
177  AKSIMD_V2F32 ab = vget_low_f32( abcd );
178  AKSIMD_V4F32 zwab = vcombine_f32( zw, ab );
179  return zwab;
180  }
181  /* 79 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 0, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwab( xyzw, abcd ); }
182  /* 80 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxbb( xyzw, abcd ); }
183  /* 81 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxbb( xyzw, abcd ); }
184  /* 82 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxbb( xyzw, abcd ); }
185  /* 83 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxbb( xyzw, abcd ); }
186  /* 84 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xybb( xyzw, abcd ); }
187  /* 85 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
188  {
189  //return akshuffle_yybb( xyzw, abcd );
190  AKSIMD_V2F32 yy = vdup_lane_f32( vget_low_f32( xyzw ), 1 );
191  AKSIMD_V2F32 bb = vdup_lane_f32( vget_low_f32( abcd ), 1 );
192  AKSIMD_V4F32 yybb = vcombine_f32( yy, bb );
193  return yybb;
194  }
195  /* 86 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zybb( xyzw, abcd ); }
196  /* 87 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wybb( xyzw, abcd ); }
197  /* 88 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzbb( xyzw, abcd ); }
198  /* 89 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzbb( xyzw, abcd ); }
199  /* 90 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzbb( xyzw, abcd ); }
200  /* 91 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzbb( xyzw, abcd ); }
201  /* 92 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwbb( xyzw, abcd ); }
202  /* 93 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywbb( xyzw, abcd ); }
203  /* 94 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwbb( xyzw, abcd ); }
204  /* 95 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 1, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwbb( xyzw, abcd ); }
205  /* 96 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxcb( xyzw, abcd ); }
206  /* 97 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxcb( xyzw, abcd ); }
207  /* 98 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxcb( xyzw, abcd ); }
208  /* 99 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxcb( xyzw, abcd ); }
209  /* 100 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xycb( xyzw, abcd ); }
210  /* 101 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yycb( xyzw, abcd ); }
211  /* 102 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zycb( xyzw, abcd ); }
212  /* 103 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wycb( xyzw, abcd ); }
213  /* 104 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzcb( xyzw, abcd ); }
214  /* 105 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzcb( xyzw, abcd ); }
215  /* 106 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzcb( xyzw, abcd ); }
216  /* 107 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzcb( xyzw, abcd ); }
217  /* 108 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwcb( xyzw, abcd ); }
218  /* 109 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywcb( xyzw, abcd ); }
219  /* 110 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwcb( xyzw, abcd ); }
220  /* 111 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 2, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwcb( xyzw, abcd ); }
221  /* 112 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxdb( xyzw, abcd ); }
222  /* 113 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxdb( xyzw, abcd ); }
223  /* 114 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
224  {
225  //return akshuffle_zxdb( xyzw, abcd );
226 
227  float32x2x2_t xz_yw = vtrn_f32( vget_low_f32( xyzw ), vget_high_f32( xyzw ) );
228  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
229  AKSIMD_V4F32 zxdb = vcombine_f32( vrev64_f32( xz_yw.val[0] ), vrev64_f32( ac_bd.val[1] ) );
230 
231  return zxdb;
232  }
233  /* 115 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxdb( xyzw, abcd ); }
234  /* 116 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xydb( xyzw, abcd ); }
235  /* 117 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yydb( xyzw, abcd ); }
236  /* 118 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zydb( xyzw, abcd ); }
237  /* 119 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wydb( xyzw, abcd ); }
238  /* 120 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzdb( xyzw, abcd ); }
239  /* 121 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzdb( xyzw, abcd ); }
240  /* 122 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzdb( xyzw, abcd ); }
241  /* 123 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzdb( xyzw, abcd ); }
242  /* 124 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwdb( xyzw, abcd ); }
243  /* 125 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywdb( xyzw, abcd ); }
244  /* 126 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwdb( xyzw, abcd ); }
245  /* 127 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(1, 3, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwdb( xyzw, abcd ); }
246  /* 128 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxac( xyzw, abcd ); }
247  /* 129 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
248  {
249  //return akshuffle_yxac( xyzw, abcd );
250  AKSIMD_V2F32 ac = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) ).val[0];
251  AKSIMD_V2F32 yx = vrev64_f32( vget_low_f32( xyzw ) );
252  AKSIMD_V4F32 yxac = vcombine_f32( yx, ac );
253  return yxac;
254  }
255  /* 130 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxac( xyzw, abcd ); }
256  /* 131 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxac( xyzw, abcd ); }
257  /* 132 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xyac( xyzw, abcd ); }
258  /* 133 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyac( xyzw, abcd ); }
259  /* 134 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zyac( xyzw, abcd ); }
260  /* 135 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyac( xyzw, abcd ); }
261  /* 136 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
262  {
263  //return akshuffle_xzac( xyzw, abcd );
264  float32x2x2_t xz_yw = vtrn_f32( vget_low_f32( xyzw ), vget_high_f32( xyzw ) );
265  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
266  AKSIMD_V4F32 xzac = vcombine_f32( xz_yw.val[0], ac_bd.val[0] );
267  return xzac;
268  }
269  /* 137 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
270  {
271  //return akshuffle_yzac( xyzw, abcd );
272  AKSIMD_V4F32 yzwx = vextq_f32( xyzw, xyzw, 1 );
273  AKSIMD_V2F32 yz = vget_low_f32( yzwx );
274  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
275  AKSIMD_V4F32 yzac = vcombine_f32( yz, ac_bd.val[0] );
276  return yzac;
277  }
278  /* 138 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzac( xyzw, abcd ); }
279  /* 139 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzac( xyzw, abcd ); }
280  /* 140 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
281  {
282  //return akshuffle_xwac( xyzw, abcd );
283  AKSIMD_V2F32 xw = vrev64_f32( vext_f32( vget_high_f32( xyzw ) , vget_low_f32( xyzw ) , 1 ) );
284  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
285  AKSIMD_V4F32 xwac = vcombine_f32( xw, ac_bd.val[0] );
286  return xwac;
287  }
288  /* 141 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
289  {
290  //return akshuffle_ywac( xyzw, abcd );
291  float32x2x2_t xz_yw = vtrn_f32( vget_low_f32( xyzw ), vget_high_f32( xyzw ) );
292  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
293  AKSIMD_V4F32 ywac = vcombine_f32( xz_yw.val[1], ac_bd.val[0] );
294  return ywac;
295  }
296  /* 142 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwac( xyzw, abcd ); }
297  /* 143 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 0, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwac( xyzw, abcd ); }
298  /* 144 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxbc( xyzw, abcd ); }
299  /* 145 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxbc( xyzw, abcd ); }
300  /* 146 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxbc( xyzw, abcd ); }
301  /* 147 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxbc( xyzw, abcd ); }
302  /* 148 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xybc( xyzw, abcd ); }
303  /* 149 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yybc( xyzw, abcd ); }
304  /* 150 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zybc( xyzw, abcd ); }
305  /* 151 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wybc( xyzw, abcd ); }
306  /* 152 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzbc( xyzw, abcd ); }
307  /* 153 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
308  {
309  //return akshuffle_yzbc( xyzw, abcd );
310  AKSIMD_V4F32 yzwx = vextq_f32( xyzw, xyzw, 1 );
311  AKSIMD_V2F32 yz = vget_low_f32( yzwx );
312  AKSIMD_V2F32 bc = vext_f32( vget_low_f32( abcd ) , vget_high_f32( abcd ) , 1 );
313  AKSIMD_V4F32 yzbc = vcombine_f32( yz , bc );
314  return yzbc;
315  }
316  /* 154 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzbc( xyzw, abcd ); }
317  /* 155 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzbc( xyzw, abcd ); }
318  /* 156 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
319  {
320  //return akshuffle_xwbc( xyzw, abcd );
321  AKSIMD_V2F32 xw = vrev64_f32( vext_f32( vget_high_f32( xyzw ) , vget_low_f32( xyzw ) , 1 ) );
322  AKSIMD_V2F32 bc = vext_f32( vget_low_f32( abcd ) , vget_high_f32( abcd ) , 1 );
323  AKSIMD_V4F32 xwbc = vcombine_f32( xw, bc );
324  return xwbc;
325  }
326  /* 157 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
327  {
328  //return akshuffle_ywbc( xyzw, abcd );
329  AKSIMD_V2F32 yw = vext_f32( vget_low_f32( xyzw ) , vrev64_f32( vget_high_f32( xyzw ) ), 1 );
330  AKSIMD_V2F32 bc = vext_f32( vget_low_f32( abcd ) , vget_high_f32( abcd ) , 1 );
331  AKSIMD_V4F32 ywbc = vcombine_f32( yw, bc );
332  return ywbc;
333  }
334  /* 158 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwbc( xyzw, abcd ); }
335  /* 159 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 1, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwbc( xyzw, abcd ); }
336  /* 160 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
337  {
338  AKSIMD_V2F32 xx = vdup_lane_f32( vget_low_f32( xyzw ), 0 );
339  AKSIMD_V2F32 cc = vdup_lane_f32( vget_high_f32( abcd ), 0 );
340  AKSIMD_V4F32 xxcc = vcombine_f32( xx, cc );
341  return xxcc;
342  }
343  /* 161 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxcc( xyzw, abcd ); }
344  /* 162 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxcc( xyzw, abcd ); }
345  /* 163 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxcc( xyzw, abcd ); }
346  /* 164 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xycc( xyzw, abcd ); }
347  /* 165 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yycc( xyzw, abcd ); }
348  /* 166 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zycc( xyzw, abcd ); }
349  /* 167 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wycc( xyzw, abcd ); }
350  /* 168 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzcc( xyzw, abcd ); }
351  /* 169 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzcc( xyzw, abcd ); }
352  /* 170 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
353  {
354  AKSIMD_V2F32 zz = vdup_lane_f32( vget_high_f32( xyzw ), 0 );
355  AKSIMD_V2F32 cc = vdup_lane_f32( vget_high_f32( abcd ), 0 );
356  AKSIMD_V4F32 zzcc = vcombine_f32( zz, cc );
357  return zzcc;
358  }
359  /* 171 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzcc( xyzw, abcd ); }
360  /* 172 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwcc( xyzw, abcd ); }
361  /* 173 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywcc( xyzw, abcd ); }
362  /* 174 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwcc( xyzw, abcd ); }
363  /* 175 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 2, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwcc( xyzw, abcd ); }
364  /* 176 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxdc( xyzw, abcd ); }
365  /* 177 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
366  {
367  AKSIMD_V2F32 yx = vrev64_f32( vget_low_f32( xyzw ) );
368  AKSIMD_V2F32 dc = vrev64_f32( vget_high_f32( abcd ) );
369  AKSIMD_V4F32 yxdc = vcombine_f32( yx, dc );
370  return yxdc;
371  }
372  /* 178 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxdc( xyzw, abcd ); }
373  /* 179 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxdc( xyzw, abcd ); }
374  /* 180 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xydc( xyzw, abcd ); }
375  /* 181 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yydc( xyzw, abcd ); }
376  /* 182 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zydc( xyzw, abcd ); }
377  /* 183 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wydc( xyzw, abcd ); }
378  /* 184 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzdc( xyzw, abcd ); }
379  /* 185 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzdc( xyzw, abcd ); }
380  /* 186 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzdc( xyzw, abcd ); }
381  /* 187 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
382  {
383  //return akshuffle_wzdc( xyzw, abcd );
384  AKSIMD_V2F32 wz = vrev64_f32( vget_high_f32( xyzw ) );
385  AKSIMD_V2F32 dc = vrev64_f32( vget_high_f32( abcd ) );
386  AKSIMD_V4F32 wzdc = vcombine_f32( wz , dc );
387  return wzdc;
388  }
389  /* 188 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwdc( xyzw, abcd ); }
390  /* 189 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywdc( xyzw, abcd ); }
391  /* 190 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwdc( xyzw, abcd ); }
392  /* 191 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(2, 3, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwdc( xyzw, abcd ); }
393  /* 192 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxad( xyzw, abcd ); }
394  /* 193 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxad( xyzw, abcd ); }
395  /* 194 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxad( xyzw, abcd ); }
396  /* 195 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxad( xyzw, abcd ); }
397  /* 196 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xyad( xyzw, abcd ); }
398  /* 197 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yyad( xyzw, abcd ); }
399  /* 198 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zyad( xyzw, abcd ); }
400  /* 199 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wyad( xyzw, abcd ); }
401  /* 200 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
402  {
403  //return akshuffle_xzad( xyzw, abcd );
404  AKSIMD_V2F32 xz = vext_f32( vrev64_f32( vget_low_f32( xyzw ) ), vget_high_f32( xyzw ) , 1 );
405  AKSIMD_V2F32 ad = vrev64_f32( vext_f32( vget_high_f32( abcd ) , vget_low_f32( abcd ) , 1 ) );
406  AKSIMD_V4F32 xzad = vcombine_f32( xz , ad );
407  return xzad;
408  }
409  /* 201 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
410  {
411  //return akshuffle_yzad( xyzw, abcd );
412  AKSIMD_V4F32 yzwx = vextq_f32( xyzw, xyzw, 1 );
413  AKSIMD_V2F32 yz = vget_low_f32( yzwx );
414  AKSIMD_V2F32 ad = vrev64_f32( vext_f32( vget_high_f32( abcd ) , vget_low_f32( abcd ) , 1 ) );
415  AKSIMD_V4F32 yzad = vcombine_f32( yz , ad );
416  return yzad;
417  }
418  /* 202 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzad( xyzw, abcd ); }
419  /* 203 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzad( xyzw, abcd ); }
420  /* 204 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
421  {
422  //return akshuffle_xwad( xyzw, abcd );
423  AKSIMD_V2F32 xw = vrev64_f32( vext_f32( vget_high_f32( xyzw ) , vget_low_f32( xyzw ) , 1 ) );
424  AKSIMD_V2F32 ad = vrev64_f32( vext_f32( vget_high_f32( abcd ) , vget_low_f32( abcd ) , 1 ) );
425  AKSIMD_V4F32 xwad = vcombine_f32( xw, ad );
426  return xwad;
427  }
428  /* 205 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywad( xyzw, abcd ); }
429  /* 206 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwad( xyzw, abcd ); }
430  /* 207 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 0, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwad( xyzw, abcd ); }
431  /* 208 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxbd( xyzw, abcd ); }
432  /* 209 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxbd( xyzw, abcd ); }
433  /* 210 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxbd( xyzw, abcd ); }
434  /* 211 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxbd( xyzw, abcd ); }
435  /* 212 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xybd( xyzw, abcd ); }
436  /* 213 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yybd( xyzw, abcd ); }
437  /* 214 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zybd( xyzw, abcd ); }
438  /* 215 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wybd( xyzw, abcd ); }
439  /* 216 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
440  {
441  //return akshuffle_xzbd( xyzw, abcd );
442  float32x2x2_t xz_yw = vtrn_f32( vget_low_f32( xyzw ), vget_high_f32( xyzw ) );
443  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
444  AKSIMD_V4F32 xzbd = vcombine_f32( xz_yw.val[0], ac_bd.val[1] );
445  return xzbd;
446  }
447  /* 217 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
448  {
449  //return akshuffle_yzbd( xyzw, abcd );
450  AKSIMD_V4F32 yzwx = vextq_f32( xyzw, xyzw, 1 );
451  AKSIMD_V2F32 yz = vget_low_f32( yzwx );
452  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
453  AKSIMD_V4F32 yzbd = vcombine_f32( yz, ac_bd.val[1] );
454  return yzbd;
455  }
456  /* 218 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzbd( xyzw, abcd ); }
457  /* 219 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzbd( xyzw, abcd ); }
458  /* 220 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
459  {
460  //return akshuffle_xwbd( xyzw, abcd );
461  AKSIMD_V2F32 xw = vrev64_f32( vext_f32( vget_high_f32( xyzw ) , vget_low_f32( xyzw ) , 1 ) );
462  AKSIMD_V2F32 bd = vext_f32( vget_low_f32( abcd ) , vrev64_f32( vget_high_f32( abcd ) ), 1 );
463 
464  AKSIMD_V4F32 xwbd = vcombine_f32( xw, bd );
465  return xwbd;
466  }
467  /* 221 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
468  {
469  //return akshuffle_ywbd( xyzw, abcd );
470  float32x2x2_t xz_yw = vtrn_f32( vget_low_f32( xyzw ), vget_high_f32( xyzw ) );
471  float32x2x2_t ac_bd = vtrn_f32( vget_low_f32( abcd ), vget_high_f32( abcd ) );
472  AKSIMD_V4F32 ywbd = vcombine_f32( xz_yw.val[1], ac_bd.val[1] );
473  return ywbd;
474  }
475  /* 222 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwbd( xyzw, abcd ); }
476  /* 223 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 1, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwbd( xyzw, abcd ); }
477  /* 224 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxcd( xyzw, abcd ); }
478  /* 225 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxcd( xyzw, abcd ); }
479  /* 226 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxcd( xyzw, abcd ); }
480  /* 227 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxcd( xyzw, abcd ); }
481  /* 228 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
482  {
483  //return akshuffle_xycd( xyzw, abcd );
484  AKSIMD_V2F32 xy = vget_low_f32( xyzw );
485  AKSIMD_V2F32 cd = vget_high_f32( abcd );
486  return vcombine_f32( xy, cd );
487  }
488  /* 229 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yycd( xyzw, abcd ); }
489  /* 230 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zycd( xyzw, abcd ); }
490  /* 231 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wycd( xyzw, abcd ); }
491  /* 232 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzcd( xyzw, abcd ); }
492  /* 233 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzcd( xyzw, abcd ); }
493  /* 234 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzcd( xyzw, abcd ); }
494  /* 235 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzcd( xyzw, abcd ); }
495  /* 236 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwcd( xyzw, abcd ); }
496  /* 237 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywcd( xyzw, abcd ); }
497  /* 238 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
498  {
499  //return akshuffle_zwcd( xyzw, abcd );
500  AKSIMD_V2F32 zw = vget_high_f32( xyzw );
501  AKSIMD_V2F32 cd = vget_high_f32( abcd );
502  AKSIMD_V4F32 zwcd = vcombine_f32( zw , cd );
503  return zwcd;
504  }
505  /* 239 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 2, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wwcd( xyzw, abcd ); }
506  /* 240 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 0, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xxdd( xyzw, abcd ); }
507  /* 241 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 0, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yxdd( xyzw, abcd ); }
508  /* 242 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 0, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zxdd( xyzw, abcd ); }
509  /* 243 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 0, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wxdd( xyzw, abcd ); }
510  /* 244 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 1, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xydd( xyzw, abcd ); }
511  /* 245 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 1, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
512  {
513  AKSIMD_V2F32 yy = vdup_lane_f32( vget_low_f32( xyzw ), 1 );
514  AKSIMD_V2F32 dd = vdup_lane_f32( vget_high_f32( abcd ), 1 );
515  AKSIMD_V4F32 yydd = vcombine_f32( yy, dd );
516  return yydd;
517  }
518  /* 246 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 1, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zydd( xyzw, abcd ); }
519  /* 247 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 1, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wydd( xyzw, abcd ); }
520  /* 248 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 2, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xzdd( xyzw, abcd ); }
521  /* 249 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 2, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_yzdd( xyzw, abcd ); }
522  /* 250 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 2, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zzdd( xyzw, abcd ); }
523  /* 251 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 2, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_wzdd( xyzw, abcd ); }
524  /* 252 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 3, 0)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_xwdd( xyzw, abcd ); }
525  /* 253 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 3, 1)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_ywdd( xyzw, abcd ); }
526  /* 254 */ //template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 3, 2)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd ) { return akshuffle_zwdd( xyzw, abcd ); }
527  /* 255 */ template<> AkForceInline AKSIMD_V4F32 SHUFFLE_V4F32<AKSIMD_SHUFFLE(3, 3, 3, 3)>( const AKSIMD_V4F32& xyzw, const AKSIMD_V4F32& abcd )
528  {
529  //return akshuffle_wwdd( xyzw, abcd );
530  AKSIMD_V2F32 ww = vdup_lane_f32( vget_high_f32( xyzw ), 1 );
531  AKSIMD_V2F32 dd = vdup_lane_f32( vget_high_f32( abcd ), 1 );
532  AKSIMD_V4F32 wwdd = vcombine_f32( ww, dd );
533  return wwdd;
534  }
535 }
536 
537 #endif // __AK_SIMD_SHUFFLE_H__