/*
 * Copyright (c) 1997-1999 Massachusetts Institute of Technology
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 *
 */
/* This file was automatically generated --- DO NOT EDIT */
/* Generated on Tue May 18 13:54:55 EDT 1999 */

#include <fftw-int.h>
#include <fftw.h>

/* Generated by: ./genfft -magic-alignment-check -magic-twiddle-load-all -magic-variables 4 -magic-loopi -notwiddleinv 15 */
/*
 * This function contains 156 FP additions, 56 FP multiplications,
 * (or, 128 additions, 28 multiplications, 28 fused multiply/add),
 * 62 stack variables, and 60 memory accesses
 */
static const fftw_real K951056516 = FFTW_KONST(+0.951056516295153572116439333379382143405698634);
static const fftw_real K587785252 = FFTW_KONST(+0.587785252292473129168705954639072768597652438);
static const fftw_real K250000000 = FFTW_KONST(+0.250000000000000000000000000000000000000000000);
static const fftw_real K559016994 = FFTW_KONST(+0.559016994374947424102293417182819058860154590);
static const fftw_real K500000000 = FFTW_KONST(+0.500000000000000000000000000000000000000000000);
static const fftw_real K866025403 = FFTW_KONST(+0.866025403784438646763723170752936183471402627);
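/*
 * These constants match the standard size-3 and size-5 DFT butterfly
 * factors: K866025403 ~ sin(2*pi/3) = sqrt(3)/2, K500000000 = 1/2,
 * K250000000 = 1/4, K559016994 ~ sqrt(5)/4, K951056516 ~ sin(2*pi/5),
 * and K587785252 ~ sin(4*pi/5) = sin(pi/5).
 */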
void fftwi_no_twiddle_15(const fftw_complex *input, fftw_complex *output, int istride, int ostride)
{
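     /*
      * Size-15 no-twiddle codelet: judging by the fftwi_ prefix and the
      * -notwiddleinv generator flag, this computes the unnormalized
      * backward DFT, output[k] = sum_{n=0}^{14} input[n] * exp(+2*pi*i*n*k/15),
      * reading input[] with stride istride and writing output[] with stride
      * ostride.  The code below gathers the inputs in the five groups
      * {0,5,10}, {6,11,1}, {9,14,4}, {3,8,13}, {12,2,7}, forms a length-3
      * butterfly for each group, and then combines the results with
      * length-5 butterflies whose outputs land in the groups {0,3,6,9,12},
      * {1,4,7,10,13}, {2,5,8,11,14}.
      */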
     ASSERT_ALIGNED_DOUBLE();
     ASSERT_ALIGNED_DOUBLE();
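     /* inputs 0, 5 and 10: length-3 butterfly terms (real and imaginary parts) */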
     tmp1 = c_re(input[0]);
     tmp30 = c_im(input[0]);
     ASSERT_ALIGNED_DOUBLE();
     tmp2 = c_re(input[5 * istride]);
     tmp3 = c_re(input[10 * istride]);
     tmp4 = tmp2 + tmp3;
     tmp29 = K866025403 * (tmp2 - tmp3);
     tmp31 = c_im(input[5 * istride]);
     tmp32 = c_im(input[10 * istride]);
     tmp33 = tmp31 + tmp32;
     tmp120 = K866025403 * (tmp32 - tmp31);
     tmp5 = tmp1 + tmp4;
     tmp119 = tmp1 - (K500000000 * tmp4);
     tmp121 = tmp119 - tmp120;
     tmp148 = tmp119 + tmp120;
     tmp87 = tmp30 + tmp33;
     tmp34 = tmp30 - (K500000000 * tmp33);
     tmp35 = tmp29 + tmp34;
     tmp67 = tmp34 - tmp29;
     ASSERT_ALIGNED_DOUBLE();
     ASSERT_ALIGNED_DOUBLE();
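     /* inputs 6, 11 and 1: length-3 butterfly terms */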
     tmp17 = c_re(input[6 * istride]);
     tmp18 = c_re(input[11 * istride]);
     tmp19 = c_re(input[istride]);
     tmp20 = tmp18 + tmp19;
     tmp51 = K866025403 * (tmp18 - tmp19);
     tmp109 = tmp17 - (K500000000 * tmp20);
     tmp52 = c_im(input[6 * istride]);
     tmp53 = c_im(input[11 * istride]);
     tmp54 = c_im(input[istride]);
     tmp55 = tmp53 + tmp54;
     tmp56 = tmp52 - (K500000000 * tmp55);
     tmp110 = K866025403 * (tmp54 - tmp53);
     ASSERT_ALIGNED_DOUBLE();
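     /* inputs 9, 14 and 4: length-3 butterfly terms */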
     tmp22 = c_re(input[9 * istride]);
     tmp23 = c_re(input[14 * istride]);
     tmp24 = c_re(input[4 * istride]);
     tmp25 = tmp23 + tmp24;
     tmp58 = K866025403 * (tmp23 - tmp24);
     tmp112 = tmp22 - (K500000000 * tmp25);
     tmp59 = c_im(input[9 * istride]);
     tmp60 = c_im(input[14 * istride]);
     tmp61 = c_im(input[4 * istride]);
     tmp62 = tmp60 + tmp61;
     tmp63 = tmp59 - (K500000000 * tmp62);
     tmp113 = K866025403 * (tmp61 - tmp60);
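     /* combine the {6,11,1} and {9,14,4} groups */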
     tmp21 = tmp17 + tmp20;
     tmp26 = tmp22 + tmp25;
     tmp27 = tmp21 + tmp26;
     tmp111 = tmp109 - tmp110;
     tmp114 = tmp112 - tmp113;
     tmp123 = tmp111 + tmp114;
     tmp139 = tmp109 + tmp110;
     tmp140 = tmp112 + tmp113;
     tmp146 = tmp139 + tmp140;
     tmp81 = tmp52 + tmp55;
     tmp82 = tmp59 + tmp62;
     tmp89 = tmp81 + tmp82;
     tmp71 = tmp56 - tmp51;
     tmp72 = tmp63 - tmp58;
     tmp73 = tmp71 + tmp72;
     tmp57 = tmp51 + tmp56;
     tmp64 = tmp58 + tmp63;
     tmp65 = tmp57 + tmp64;
     ASSERT_ALIGNED_DOUBLE();
     ASSERT_ALIGNED_DOUBLE();
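     /* inputs 3, 8 and 13: length-3 butterfly terms */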
     tmp6 = c_re(input[3 * istride]);
     tmp7 = c_re(input[8 * istride]);
     tmp8 = c_re(input[13 * istride]);
     tmp9 = tmp7 + tmp8;
     tmp36 = K866025403 * (tmp7 - tmp8);
     tmp102 = tmp6 - (K500000000 * tmp9);
     tmp37 = c_im(input[3 * istride]);
     tmp38 = c_im(input[8 * istride]);
     tmp39 = c_im(input[13 * istride]);
     tmp40 = tmp38 + tmp39;
     tmp41 = tmp37 - (K500000000 * tmp40);
     tmp103 = K866025403 * (tmp39 - tmp38);
     ASSERT_ALIGNED_DOUBLE();
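     /* inputs 12, 2 and 7: length-3 butterfly terms */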
     tmp11 = c_re(input[12 * istride]);
     tmp12 = c_re(input[2 * istride]);
     tmp13 = c_re(input[7 * istride]);
     tmp14 = tmp12 + tmp13;
     tmp43 = K866025403 * (tmp12 - tmp13);
     tmp105 = tmp11 - (K500000000 * tmp14);
     tmp44 = c_im(input[12 * istride]);
     tmp45 = c_im(input[2 * istride]);
     tmp46 = c_im(input[7 * istride]);
     tmp47 = tmp45 + tmp46;
     tmp48 = tmp44 - (K500000000 * tmp47);
     tmp106 = K866025403 * (tmp46 - tmp45);
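     /* combine the {3,8,13} and {12,2,7} groups */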
     tmp10 = tmp6 + tmp9;
     tmp15 = tmp11 + tmp14;
     tmp16 = tmp10 + tmp15;
     tmp104 = tmp102 - tmp103;
     tmp107 = tmp105 - tmp106;
     tmp122 = tmp104 + tmp107;
     tmp136 = tmp102 + tmp103;
     tmp137 = tmp105 + tmp106;
     tmp145 = tmp136 + tmp137;
     tmp78 = tmp37 + tmp40;
     tmp79 = tmp44 + tmp47;
     tmp88 = tmp78 + tmp79;
     tmp68 = tmp41 - tmp36;
     tmp69 = tmp48 - tmp43;
     tmp70 = tmp68 + tmp69;
     tmp42 = tmp36 + tmp41;
     tmp49 = tmp43 + tmp48;
     tmp50 = tmp42 + tmp49;
     ASSERT_ALIGNED_DOUBLE();
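     /* length-5 recombination: real parts of outputs 0, 3, 6, 9 and 12 */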
     tmp76 = K559016994 * (tmp16 - tmp27);
     tmp28 = tmp16 + tmp27;
     tmp75 = tmp5 - (K250000000 * tmp28);
     tmp80 = tmp78 - tmp79;
     tmp83 = tmp81 - tmp82;
     tmp84 = (K587785252 * tmp80) - (K951056516 * tmp83);
     tmp86 = (K951056516 * tmp80) + (K587785252 * tmp83);
     c_re(output[0]) = tmp5 + tmp28;
     tmp85 = tmp76 + tmp75;
     c_re(output[6 * ostride]) = tmp85 - tmp86;
     c_re(output[9 * ostride]) = tmp85 + tmp86;
     tmp77 = tmp75 - tmp76;
     c_re(output[12 * ostride]) = tmp77 - tmp84;
     c_re(output[3 * ostride]) = tmp77 + tmp84;
     ASSERT_ALIGNED_DOUBLE();
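     /* length-5 recombination: imaginary parts of outputs 1, 4, 7, 10 and 13 */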
     tmp134 = K559016994 * (tmp50 - tmp65);
     tmp66 = tmp50 + tmp65;
     tmp133 = tmp35 - (K250000000 * tmp66);
     tmp138 = tmp136 - tmp137;
     tmp141 = tmp139 - tmp140;
     tmp142 = (K587785252 * tmp138) - (K951056516 * tmp141);
     tmp144 = (K951056516 * tmp138) + (K587785252 * tmp141);
     c_im(output[10 * ostride]) = tmp35 + tmp66;
     tmp143 = tmp134 + tmp133;
     c_im(output[4 * ostride]) = tmp143 - tmp144;
     c_im(output[ostride]) = tmp143 + tmp144;
     tmp135 = tmp133 - tmp134;
     c_im(output[13 * ostride]) = tmp135 - tmp142;
     c_im(output[7 * ostride]) = tmp135 + tmp142;
     ASSERT_ALIGNED_DOUBLE();
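     /* length-5 recombination: real parts of outputs 1, 4, 7, 10 and 13 */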
     tmp147 = K559016994 * (tmp145 - tmp146);
     tmp149 = tmp145 + tmp146;
     tmp150 = tmp148 - (K250000000 * tmp149);
     tmp152 = tmp42 - tmp49;
     tmp153 = tmp57 - tmp64;
     tmp154 = (K951056516 * tmp152) + (K587785252 * tmp153);
     tmp156 = (K587785252 * tmp152) - (K951056516 * tmp153);
     c_re(output[10 * ostride]) = tmp148 + tmp149;
     tmp155 = tmp150 - tmp147;
     c_re(output[7 * ostride]) = tmp155 - tmp156;
     c_re(output[13 * ostride]) = tmp156 + tmp155;
     tmp151 = tmp147 + tmp150;
     c_re(output[ostride]) = tmp151 - tmp154;
     c_re(output[4 * ostride]) = tmp154 + tmp151;
     ASSERT_ALIGNED_DOUBLE();
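     /* length-5 recombination: real parts of outputs 2, 5, 8, 11 and 14 */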
     tmp126 = K559016994 * (tmp122 - tmp123);
     tmp124 = tmp122 + tmp123;
     tmp125 = tmp121 - (K250000000 * tmp124);
     tmp128 = tmp68 - tmp69;
     tmp129 = tmp71 - tmp72;
     tmp130 = (K587785252 * tmp128) - (K951056516 * tmp129);
     tmp132 = (K951056516 * tmp128) + (K587785252 * tmp129);
     c_re(output[5 * ostride]) = tmp121 + tmp124;
     tmp131 = tmp126 + tmp125;
     c_re(output[11 * ostride]) = tmp131 - tmp132;
     c_re(output[14 * ostride]) = tmp132 + tmp131;
     tmp127 = tmp125 - tmp126;
     c_re(output[2 * ostride]) = tmp127 - tmp130;
     c_re(output[8 * ostride]) = tmp130 + tmp127;
     ASSERT_ALIGNED_DOUBLE();
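     /* length-5 recombination: imaginary parts of outputs 0, 3, 6, 9 and 12 */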
     tmp92 = K559016994 * (tmp88 - tmp89);
     tmp90 = tmp88 + tmp89;
     tmp91 = tmp87 - (K250000000 * tmp90);
     tmp94 = tmp10 - tmp15;
     tmp95 = tmp21 - tmp26;
     tmp96 = (K587785252 * tmp94) - (K951056516 * tmp95);
     tmp97 = (K951056516 * tmp94) + (K587785252 * tmp95);
     c_im(output[0]) = tmp87 + tmp90;
     tmp98 = tmp92 + tmp91;
     c_im(output[6 * ostride]) = tmp97 + tmp98;
     c_im(output[9 * ostride]) = tmp98 - tmp97;
     tmp93 = tmp91 - tmp92;
     c_im(output[3 * ostride]) = tmp93 - tmp96;
     c_im(output[12 * ostride]) = tmp96 + tmp93;
     ASSERT_ALIGNED_DOUBLE();
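     /* length-5 recombination: imaginary parts of outputs 2, 5, 8, 11 and 14 */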
     tmp100 = K559016994 * (tmp70 - tmp73);
     tmp74 = tmp70 + tmp73;
     tmp99 = tmp67 - (K250000000 * tmp74);
     tmp108 = tmp104 - tmp107;
     tmp115 = tmp111 - tmp114;
     tmp116 = (K587785252 * tmp108) - (K951056516 * tmp115);
     tmp118 = (K951056516 * tmp108) + (K587785252 * tmp115);
     c_im(output[5 * ostride]) = tmp67 + tmp74;
     tmp117 = tmp100 + tmp99;
     c_im(output[14 * ostride]) = tmp117 - tmp118;
     c_im(output[11 * ostride]) = tmp117 + tmp118;
     tmp101 = tmp99 - tmp100;
     c_im(output[8 * ostride]) = tmp101 - tmp116;
     c_im(output[2 * ostride]) = tmp101 + tmp116;
}

fftw_codelet_desc fftwi_no_twiddle_15_desc =
{
     "fftwi_no_twiddle_15",
     (void (*)()) fftwi_no_twiddle_15,