; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown -mattr=+sse2 | FileCheck %s --check-prefix=X32-SSE
; RUN: llc < %s -mtriple=i686-unknown -mattr=+avx | FileCheck %s --check-prefix=X32-AVX
; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+sse2 | FileCheck %s --check-prefix=X64-SSE
; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+avx | FileCheck %s --check-prefix=X64-AVX

;PR29078

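; Each i64 lane is masked to a small non-negative constant before the signed
; conversion to <2 x double>.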
define <2 x double> @mask_sitofp_2i64_2f64(<2 x i64> %a) nounwind {
; X32-SSE-LABEL: mask_sitofp_2i64_2f64:
; X32-SSE:       # BB#0:
; X32-SSE-NEXT:    pushl %ebp
; X32-SSE-NEXT:    movl %esp, %ebp
; X32-SSE-NEXT:    andl $-8, %esp
; X32-SSE-NEXT:    subl $32, %esp
; X32-SSE-NEXT:    pand {{\.LCPI.*}}, %xmm0
; X32-SSE-NEXT:    movq {{.*#+}} xmm1 = xmm0[0],zero
; X32-SSE-NEXT:    movq %xmm1, {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; X32-SSE-NEXT:    movq %xmm0, {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fstpl {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fstpl (%esp)
; X32-SSE-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
; X32-SSE-NEXT:    movhpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; X32-SSE-NEXT:    movl %ebp, %esp
; X32-SSE-NEXT:    popl %ebp
; X32-SSE-NEXT:    retl
;
; X32-AVX-LABEL: mask_sitofp_2i64_2f64:
; X32-AVX:       # BB#0:
; X32-AVX-NEXT:    pushl %ebp
; X32-AVX-NEXT:    movl %esp, %ebp
; X32-AVX-NEXT:    andl $-8, %esp
; X32-AVX-NEXT:    subl $32, %esp
; X32-AVX-NEXT:    vpand {{\.LCPI.*}}, %xmm0, %xmm0
; X32-AVX-NEXT:    vmovq {{.*#+}} xmm1 = xmm0[0],zero
; X32-AVX-NEXT:    vmovq %xmm1, {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; X32-AVX-NEXT:    vmovq %xmm0, {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fstpl {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fstpl (%esp)
; X32-AVX-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
; X32-AVX-NEXT:    vmovhpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; X32-AVX-NEXT:    movl %ebp, %esp
; X32-AVX-NEXT:    popl %ebp
; X32-AVX-NEXT:    retl
;
; X64-SSE-LABEL: mask_sitofp_2i64_2f64:
; X64-SSE:       # BB#0:
; X64-SSE-NEXT:    pand {{.*}}(%rip), %xmm0
; X64-SSE-NEXT:    movd %xmm0, %rax
; X64-SSE-NEXT:    cvtsi2sdq %rax, %xmm1
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; X64-SSE-NEXT:    movd %xmm0, %rax
; X64-SSE-NEXT:    xorps %xmm0, %xmm0
; X64-SSE-NEXT:    cvtsi2sdq %rax, %xmm0
; X64-SSE-NEXT:    unpcklpd {{.*#+}} xmm1 = xmm1[0],xmm0[0]
; X64-SSE-NEXT:    movapd %xmm1, %xmm0
; X64-SSE-NEXT:    retq
;
; X64-AVX-LABEL: mask_sitofp_2i64_2f64:
; X64-AVX:       # BB#0:
; X64-AVX-NEXT:    vpand {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT:    vpextrq $1, %xmm0, %rax
; X64-AVX-NEXT:    vcvtsi2sdq %rax, %xmm1, %xmm1
; X64-AVX-NEXT:    vmovq %xmm0, %rax
; X64-AVX-NEXT:    vcvtsi2sdq %rax, %xmm2, %xmm0
; X64-AVX-NEXT:    vunpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; X64-AVX-NEXT:    retq
  %and = and <2 x i64> %a, <i64 255, i64 65535>
  %cvt = sitofp <2 x i64> %and to <2 x double>
  ret <2 x double> %cvt
}

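; Same masking as above, but with an unsigned conversion to <2 x double>.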
define <2 x double> @mask_uitofp_2i64_2f64(<2 x i64> %a) nounwind {
; X32-SSE-LABEL: mask_uitofp_2i64_2f64:
; X32-SSE:       # BB#0:
; X32-SSE-NEXT:    pand {{\.LCPI.*}}, %xmm0
; X32-SSE-NEXT:    movdqa {{.*#+}} xmm1 = [1127219200,1160773632,0,0]
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm2 = xmm0[2,3,0,1]
; X32-SSE-NEXT:    punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X32-SSE-NEXT:    movapd {{.*#+}} xmm3 = [4.503600e+15,1.934281e+25]
; X32-SSE-NEXT:    subpd %xmm3, %xmm0
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm4 = xmm0[2,3,0,1]
; X32-SSE-NEXT:    addpd %xmm4, %xmm0
; X32-SSE-NEXT:    punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; X32-SSE-NEXT:    subpd %xmm3, %xmm2
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm1 = xmm2[2,3,0,1]
; X32-SSE-NEXT:    addpd %xmm2, %xmm1
; X32-SSE-NEXT:    unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; X32-SSE-NEXT:    retl
;
; X32-AVX-LABEL: mask_uitofp_2i64_2f64:
; X32-AVX:       # BB#0:
; X32-AVX-NEXT:    vpand {{\.LCPI.*}}, %xmm0, %xmm0
; X32-AVX-NEXT:    vmovdqa {{.*#+}} xmm1 = [1127219200,1160773632,0,0]
; X32-AVX-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X32-AVX-NEXT:    vmovapd {{.*#+}} xmm3 = [4.503600e+15,1.934281e+25]
; X32-AVX-NEXT:    vsubpd %xmm3, %xmm2, %xmm2
; X32-AVX-NEXT:    vhaddpd %xmm2, %xmm2, %xmm2
; X32-AVX-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; X32-AVX-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X32-AVX-NEXT:    vsubpd %xmm3, %xmm0, %xmm0
; X32-AVX-NEXT:    vhaddpd %xmm0, %xmm0, %xmm0
; X32-AVX-NEXT:    vunpcklpd {{.*#+}} xmm0 = xmm2[0],xmm0[0]
; X32-AVX-NEXT:    retl
;
; X64-SSE-LABEL: mask_uitofp_2i64_2f64:
; X64-SSE:       # BB#0:
; X64-SSE-NEXT:    pand {{.*}}(%rip), %xmm0
; X64-SSE-NEXT:    movdqa {{.*#+}} xmm1 = [1127219200,1160773632,0,0]
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm2 = xmm0[2,3,0,1]
; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X64-SSE-NEXT:    movapd {{.*#+}} xmm3 = [4.503600e+15,1.934281e+25]
; X64-SSE-NEXT:    subpd %xmm3, %xmm0
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm4 = xmm0[2,3,0,1]
; X64-SSE-NEXT:    addpd %xmm4, %xmm0
; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; X64-SSE-NEXT:    subpd %xmm3, %xmm2
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm1 = xmm2[2,3,0,1]
; X64-SSE-NEXT:    addpd %xmm2, %xmm1
; X64-SSE-NEXT:    unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; X64-SSE-NEXT:    retq
;
; X64-AVX-LABEL: mask_uitofp_2i64_2f64:
; X64-AVX:       # BB#0:
; X64-AVX-NEXT:    vpand {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT:    vmovdqa {{.*#+}} xmm1 = [1127219200,1160773632,0,0]
; X64-AVX-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X64-AVX-NEXT:    vmovapd {{.*#+}} xmm3 = [4.503600e+15,1.934281e+25]
; X64-AVX-NEXT:    vsubpd %xmm3, %xmm2, %xmm2
; X64-AVX-NEXT:    vhaddpd %xmm2, %xmm2, %xmm2
; X64-AVX-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; X64-AVX-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X64-AVX-NEXT:    vsubpd %xmm3, %xmm0, %xmm0
; X64-AVX-NEXT:    vhaddpd %xmm0, %xmm0, %xmm0
; X64-AVX-NEXT:    vunpcklpd {{.*#+}} xmm0 = xmm2[0],xmm0[0]
; X64-AVX-NEXT:    retq
  %and = and <2 x i64> %a, <i64 255, i64 65535>
  %cvt = uitofp <2 x i64> %and to <2 x double>
  ret <2 x double> %cvt
}

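; Four i64 lanes are masked to small non-negative constants, then converted
; with a signed conversion to <4 x float>.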
define <4 x float> @mask_sitofp_4i64_4f32(<4 x i64> %a) nounwind {
; X32-SSE-LABEL: mask_sitofp_4i64_4f32:
; X32-SSE:       # BB#0:
; X32-SSE-NEXT:    pushl %ebp
; X32-SSE-NEXT:    movl %esp, %ebp
; X32-SSE-NEXT:    andl $-8, %esp
; X32-SSE-NEXT:    subl $48, %esp
; X32-SSE-NEXT:    pand {{\.LCPI.*}}, %xmm0
; X32-SSE-NEXT:    pand {{\.LCPI.*}}, %xmm1
; X32-SSE-NEXT:    movq %xmm1, {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    movq %xmm0, {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[2,3,0,1]
; X32-SSE-NEXT:    movq %xmm1, {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; X32-SSE-NEXT:    movq %xmm0, {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fstps {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fstps (%esp)
; X32-SSE-NEXT:    movss {{.*#+}} xmm1 = mem[0],zero,zero,zero
; X32-SSE-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X32-SSE-NEXT:    unpcklps {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X32-SSE-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fstps {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fstps {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    movss {{.*#+}} xmm1 = mem[0],zero,zero,zero
; X32-SSE-NEXT:    movss {{.*#+}} xmm2 = mem[0],zero,zero,zero
; X32-SSE-NEXT:    unpcklps {{.*#+}} xmm2 = xmm2[0],xmm1[0],xmm2[1],xmm1[1]
; X32-SSE-NEXT:    unpcklps {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
; X32-SSE-NEXT:    movl %ebp, %esp
; X32-SSE-NEXT:    popl %ebp
; X32-SSE-NEXT:    retl
;
; X32-AVX-LABEL: mask_sitofp_4i64_4f32:
; X32-AVX:       # BB#0:
; X32-AVX-NEXT:    pushl %ebp
; X32-AVX-NEXT:    movl %esp, %ebp
; X32-AVX-NEXT:    andl $-8, %esp
; X32-AVX-NEXT:    subl $48, %esp
; X32-AVX-NEXT:    vandps {{\.LCPI.*}}, %ymm0, %ymm0
; X32-AVX-NEXT:    vpextrd $1, %xmm0, %eax
; X32-AVX-NEXT:    vpinsrd $1, %eax, %xmm0, %xmm1
; X32-AVX-NEXT:    vmovq %xmm1, {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    vpextrd $3, %xmm0, %eax
; X32-AVX-NEXT:    vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
; X32-AVX-NEXT:    vpinsrd $1, %eax, %xmm1, %xmm1
; X32-AVX-NEXT:    vmovq %xmm1, {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    vextractf128 $1, %ymm0, %xmm0
; X32-AVX-NEXT:    vpextrd $1, %xmm0, %eax
; X32-AVX-NEXT:    vpinsrd $1, %eax, %xmm0, %xmm1
; X32-AVX-NEXT:    vmovq %xmm1, {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    vpextrd $3, %xmm0, %eax
; X32-AVX-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; X32-AVX-NEXT:    vpinsrd $1, %eax, %xmm0, %xmm0
; X32-AVX-NEXT:    vmovq %xmm0, {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fstps {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fstps {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fstps {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fstps (%esp)
; X32-AVX-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X32-AVX-NEXT:    vinsertps {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[2,3]
; X32-AVX-NEXT:    vinsertps {{.*#+}} xmm0 = xmm0[0,1],mem[0],xmm0[3]
; X32-AVX-NEXT:    vinsertps {{.*#+}} xmm0 = xmm0[0,1,2],mem[0]
; X32-AVX-NEXT:    movl %ebp, %esp
; X32-AVX-NEXT:    popl %ebp
; X32-AVX-NEXT:    vzeroupper
; X32-AVX-NEXT:    retl
;
; X64-SSE-LABEL: mask_sitofp_4i64_4f32:
; X64-SSE:       # BB#0:
; X64-SSE-NEXT:    pand {{.*}}(%rip), %xmm0
; X64-SSE-NEXT:    pand {{.*}}(%rip), %xmm1
; X64-SSE-NEXT:    movd %xmm1, %rax
; X64-SSE-NEXT:    cvtsi2ssq %rax, %xmm3
; X64-SSE-NEXT:    movd %xmm0, %rax
; X64-SSE-NEXT:    cvtsi2ssq %rax, %xmm2
; X64-SSE-NEXT:    unpcklps {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[2,3,0,1]
; X64-SSE-NEXT:    movd %xmm1, %rax
; X64-SSE-NEXT:    xorps %xmm1, %xmm1
; X64-SSE-NEXT:    cvtsi2ssq %rax, %xmm1
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; X64-SSE-NEXT:    movd %xmm0, %rax
; X64-SSE-NEXT:    xorps %xmm0, %xmm0
; X64-SSE-NEXT:    cvtsi2ssq %rax, %xmm0
; X64-SSE-NEXT:    unpcklps {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X64-SSE-NEXT:    unpcklps {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
; X64-SSE-NEXT:    movaps %xmm2, %xmm0
; X64-SSE-NEXT:    retq
;
; X64-AVX-LABEL: mask_sitofp_4i64_4f32:
; X64-AVX:       # BB#0:
; X64-AVX-NEXT:    vandps {{.*}}(%rip), %ymm0, %ymm0
; X64-AVX-NEXT:    vpextrq $1, %xmm0, %rax
; X64-AVX-NEXT:    vcvtsi2ssq %rax, %xmm1, %xmm1
; X64-AVX-NEXT:    vmovq %xmm0, %rax
; X64-AVX-NEXT:    vcvtsi2ssq %rax, %xmm2, %xmm2
; X64-AVX-NEXT:    vinsertps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[2,3]
; X64-AVX-NEXT:    vextractf128 $1, %ymm0, %xmm0
; X64-AVX-NEXT:    vmovq %xmm0, %rax
; X64-AVX-NEXT:    vcvtsi2ssq %rax, %xmm3, %xmm2
; X64-AVX-NEXT:    vinsertps {{.*#+}} xmm1 = xmm1[0,1],xmm2[0],xmm1[3]
; X64-AVX-NEXT:    vpextrq $1, %xmm0, %rax
; X64-AVX-NEXT:    vcvtsi2ssq %rax, %xmm3, %xmm0
; X64-AVX-NEXT:    vinsertps {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[0]
; X64-AVX-NEXT:    vzeroupper
; X64-AVX-NEXT:    retq
  %and = and <4 x i64> %a, <i64 127, i64 255, i64 4095, i64 65535>
  %cvt = sitofp <4 x i64> %and to <4 x float>
  ret <4 x float> %cvt
}

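; Four i64 lanes are masked to small non-negative constants, then converted
; with an unsigned conversion to <4 x float>.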
define <4 x float> @mask_uitofp_4i64_4f32(<4 x i64> %a) nounwind {
; X32-SSE-LABEL: mask_uitofp_4i64_4f32:
; X32-SSE:       # BB#0:
; X32-SSE-NEXT:    pushl %ebp
; X32-SSE-NEXT:    movl %esp, %ebp
; X32-SSE-NEXT:    andl $-8, %esp
; X32-SSE-NEXT:    subl $48, %esp
; X32-SSE-NEXT:    pand {{\.LCPI.*}}, %xmm0
; X32-SSE-NEXT:    pand {{\.LCPI.*}}, %xmm1
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm2 = xmm1[2,3,0,1]
; X32-SSE-NEXT:    movq %xmm2, {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm2 = xmm0[2,3,0,1]
; X32-SSE-NEXT:    movq %xmm2, {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    movq %xmm1, {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    movq %xmm0, {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm2 = xmm1[3,1,2,3]
; X32-SSE-NEXT:    movd %xmm2, %eax
; X32-SSE-NEXT:    xorl %ecx, %ecx
; X32-SSE-NEXT:    testl %eax, %eax
; X32-SSE-NEXT:    setns %cl
; X32-SSE-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fadds {{\.LCPI.*}}(,%ecx,4)
; X32-SSE-NEXT:    fstps {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm2 = xmm0[3,1,2,3]
; X32-SSE-NEXT:    movd %xmm2, %eax
; X32-SSE-NEXT:    xorl %ecx, %ecx
; X32-SSE-NEXT:    testl %eax, %eax
; X32-SSE-NEXT:    setns %cl
; X32-SSE-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fadds {{\.LCPI.*}}(,%ecx,4)
; X32-SSE-NEXT:    fstps (%esp)
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[1,1,2,3]
; X32-SSE-NEXT:    movd %xmm1, %eax
; X32-SSE-NEXT:    xorl %ecx, %ecx
; X32-SSE-NEXT:    testl %eax, %eax
; X32-SSE-NEXT:    setns %cl
; X32-SSE-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fadds {{\.LCPI.*}}(,%ecx,4)
; X32-SSE-NEXT:    fstps {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
; X32-SSE-NEXT:    movd %xmm0, %eax
; X32-SSE-NEXT:    xorl %ecx, %ecx
; X32-SSE-NEXT:    testl %eax, %eax
; X32-SSE-NEXT:    setns %cl
; X32-SSE-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fadds {{\.LCPI.*}}(,%ecx,4)
; X32-SSE-NEXT:    fstps {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X32-SSE-NEXT:    movss {{.*#+}} xmm1 = mem[0],zero,zero,zero
; X32-SSE-NEXT:    unpcklps {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1]
; X32-SSE-NEXT:    movss {{.*#+}} xmm2 = mem[0],zero,zero,zero
; X32-SSE-NEXT:    movss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X32-SSE-NEXT:    unpcklps {{.*#+}} xmm0 = xmm0[0],xmm2[0],xmm0[1],xmm2[1]
; X32-SSE-NEXT:    unpcklps {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X32-SSE-NEXT:    movl %ebp, %esp
; X32-SSE-NEXT:    popl %ebp
; X32-SSE-NEXT:    retl
;
; X32-AVX-LABEL: mask_uitofp_4i64_4f32:
; X32-AVX:       # BB#0:
; X32-AVX-NEXT:    pushl %ebp
; X32-AVX-NEXT:    movl %esp, %ebp
; X32-AVX-NEXT:    pushl %ebx
; X32-AVX-NEXT:    pushl %esi
; X32-AVX-NEXT:    andl $-8, %esp
; X32-AVX-NEXT:    subl $48, %esp
; X32-AVX-NEXT:    vandps {{\.LCPI.*}}, %ymm0, %ymm0
; X32-AVX-NEXT:    vpextrd $1, %xmm0, %eax
; X32-AVX-NEXT:    vpinsrd $1, %eax, %xmm0, %xmm1
; X32-AVX-NEXT:    vmovq %xmm1, {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    vpextrd $3, %xmm0, %ecx
; X32-AVX-NEXT:    vpshufd {{.*#+}} xmm1 = xmm0[2,3,0,1]
; X32-AVX-NEXT:    vpinsrd $1, %ecx, %xmm1, %xmm1
; X32-AVX-NEXT:    vmovq %xmm1, {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    vextractf128 $1, %ymm0, %xmm0
; X32-AVX-NEXT:    vpextrd $1, %xmm0, %edx
; X32-AVX-NEXT:    vpinsrd $1, %edx, %xmm0, %xmm1
; X32-AVX-NEXT:    vmovq %xmm1, {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    vpextrd $3, %xmm0, %esi
; X32-AVX-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; X32-AVX-NEXT:    vpinsrd $1, %esi, %xmm0, %xmm0
; X32-AVX-NEXT:    vmovq %xmm0, {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    xorl %ebx, %ebx
; X32-AVX-NEXT:    testl %eax, %eax
; X32-AVX-NEXT:    setns %bl
; X32-AVX-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fadds {{\.LCPI.*}}(,%ebx,4)
; X32-AVX-NEXT:    fstps (%esp)
; X32-AVX-NEXT:    xorl %eax, %eax
; X32-AVX-NEXT:    testl %ecx, %ecx
; X32-AVX-NEXT:    setns %al
; X32-AVX-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fadds {{\.LCPI.*}}(,%eax,4)
; X32-AVX-NEXT:    fstps {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    xorl %eax, %eax
; X32-AVX-NEXT:    testl %edx, %edx
; X32-AVX-NEXT:    setns %al
; X32-AVX-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fadds {{\.LCPI.*}}(,%eax,4)
; X32-AVX-NEXT:    fstps {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    xorl %eax, %eax
; X32-AVX-NEXT:    testl %esi, %esi
; X32-AVX-NEXT:    setns %al
; X32-AVX-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fadds {{\.LCPI.*}}(,%eax,4)
; X32-AVX-NEXT:    fstps {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    vmovss {{.*#+}} xmm0 = mem[0],zero,zero,zero
; X32-AVX-NEXT:    vinsertps {{.*#+}} xmm0 = xmm0[0],mem[0],xmm0[2,3]
; X32-AVX-NEXT:    vinsertps {{.*#+}} xmm0 = xmm0[0,1],mem[0],xmm0[3]
; X32-AVX-NEXT:    vinsertps {{.*#+}} xmm0 = xmm0[0,1,2],mem[0]
; X32-AVX-NEXT:    leal -8(%ebp), %esp
; X32-AVX-NEXT:    popl %esi
; X32-AVX-NEXT:    popl %ebx
; X32-AVX-NEXT:    popl %ebp
; X32-AVX-NEXT:    vzeroupper
; X32-AVX-NEXT:    retl
;
; X64-SSE-LABEL: mask_uitofp_4i64_4f32:
; X64-SSE:       # BB#0:
; X64-SSE-NEXT:    pand {{.*}}(%rip), %xmm0
; X64-SSE-NEXT:    pand {{.*}}(%rip), %xmm1
; X64-SSE-NEXT:    movd %xmm1, %rax
; X64-SSE-NEXT:    testq %rax, %rax
; X64-SSE-NEXT:    js .LBB3_1
; X64-SSE-NEXT:  # BB#2:
; X64-SSE-NEXT:    cvtsi2ssq %rax, %xmm3
; X64-SSE-NEXT:    jmp .LBB3_3
; X64-SSE-NEXT:  .LBB3_1:
; X64-SSE-NEXT:    movq %rax, %rcx
; X64-SSE-NEXT:    shrq %rcx
; X64-SSE-NEXT:    andl $1, %eax
; X64-SSE-NEXT:    orq %rcx, %rax
; X64-SSE-NEXT:    cvtsi2ssq %rax, %xmm3
; X64-SSE-NEXT:    addss %xmm3, %xmm3
; X64-SSE-NEXT:  .LBB3_3:
; X64-SSE-NEXT:    movd %xmm0, %rax
; X64-SSE-NEXT:    testq %rax, %rax
; X64-SSE-NEXT:    js .LBB3_4
; X64-SSE-NEXT:  # BB#5:
; X64-SSE-NEXT:    cvtsi2ssq %rax, %xmm2
; X64-SSE-NEXT:    jmp .LBB3_6
; X64-SSE-NEXT:  .LBB3_4:
; X64-SSE-NEXT:    movq %rax, %rcx
; X64-SSE-NEXT:    shrq %rcx
; X64-SSE-NEXT:    andl $1, %eax
; X64-SSE-NEXT:    orq %rcx, %rax
; X64-SSE-NEXT:    cvtsi2ssq %rax, %xmm2
; X64-SSE-NEXT:    addss %xmm2, %xmm2
; X64-SSE-NEXT:  .LBB3_6:
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[2,3,0,1]
; X64-SSE-NEXT:    movd %xmm1, %rax
; X64-SSE-NEXT:    testq %rax, %rax
; X64-SSE-NEXT:    js .LBB3_7
; X64-SSE-NEXT:  # BB#8:
; X64-SSE-NEXT:    xorps %xmm1, %xmm1
; X64-SSE-NEXT:    cvtsi2ssq %rax, %xmm1
; X64-SSE-NEXT:    jmp .LBB3_9
; X64-SSE-NEXT:  .LBB3_7:
; X64-SSE-NEXT:    movq %rax, %rcx
; X64-SSE-NEXT:    shrq %rcx
; X64-SSE-NEXT:    andl $1, %eax
; X64-SSE-NEXT:    orq %rcx, %rax
; X64-SSE-NEXT:    xorps %xmm1, %xmm1
; X64-SSE-NEXT:    cvtsi2ssq %rax, %xmm1
; X64-SSE-NEXT:    addss %xmm1, %xmm1
; X64-SSE-NEXT:  .LBB3_9:
; X64-SSE-NEXT:    unpcklps {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1]
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; X64-SSE-NEXT:    movd %xmm0, %rax
; X64-SSE-NEXT:    testq %rax, %rax
; X64-SSE-NEXT:    js .LBB3_10
; X64-SSE-NEXT:  # BB#11:
; X64-SSE-NEXT:    xorps %xmm0, %xmm0
; X64-SSE-NEXT:    cvtsi2ssq %rax, %xmm0
; X64-SSE-NEXT:    jmp .LBB3_12
; X64-SSE-NEXT:  .LBB3_10:
; X64-SSE-NEXT:    movq %rax, %rcx
; X64-SSE-NEXT:    shrq %rcx
; X64-SSE-NEXT:    andl $1, %eax
; X64-SSE-NEXT:    orq %rcx, %rax
; X64-SSE-NEXT:    xorps %xmm0, %xmm0
; X64-SSE-NEXT:    cvtsi2ssq %rax, %xmm0
; X64-SSE-NEXT:    addss %xmm0, %xmm0
; X64-SSE-NEXT:  .LBB3_12:
; X64-SSE-NEXT:    unpcklps {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1]
; X64-SSE-NEXT:    unpcklps {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
; X64-SSE-NEXT:    movaps %xmm2, %xmm0
; X64-SSE-NEXT:    retq
;
; X64-AVX-LABEL: mask_uitofp_4i64_4f32:
; X64-AVX:       # BB#0:
; X64-AVX-NEXT:    vandps {{.*}}(%rip), %ymm0, %ymm0
; X64-AVX-NEXT:    vpextrq $1, %xmm0, %rax
; X64-AVX-NEXT:    testq %rax, %rax
; X64-AVX-NEXT:    js .LBB3_1
; X64-AVX-NEXT:  # BB#2:
; X64-AVX-NEXT:    vcvtsi2ssq %rax, %xmm1, %xmm1
; X64-AVX-NEXT:    jmp .LBB3_3
; X64-AVX-NEXT:  .LBB3_1:
; X64-AVX-NEXT:    movq %rax, %rcx
; X64-AVX-NEXT:    shrq %rcx
; X64-AVX-NEXT:    andl $1, %eax
; X64-AVX-NEXT:    orq %rcx, %rax
; X64-AVX-NEXT:    vcvtsi2ssq %rax, %xmm1, %xmm1
; X64-AVX-NEXT:    vaddss %xmm1, %xmm1, %xmm1
; X64-AVX-NEXT:  .LBB3_3:
; X64-AVX-NEXT:    vmovq %xmm0, %rax
; X64-AVX-NEXT:    testq %rax, %rax
; X64-AVX-NEXT:    js .LBB3_4
; X64-AVX-NEXT:  # BB#5:
; X64-AVX-NEXT:    vcvtsi2ssq %rax, %xmm2, %xmm2
; X64-AVX-NEXT:    jmp .LBB3_6
; X64-AVX-NEXT:  .LBB3_4:
; X64-AVX-NEXT:    movq %rax, %rcx
; X64-AVX-NEXT:    shrq %rcx
; X64-AVX-NEXT:    andl $1, %eax
; X64-AVX-NEXT:    orq %rcx, %rax
; X64-AVX-NEXT:    vcvtsi2ssq %rax, %xmm2, %xmm2
; X64-AVX-NEXT:    vaddss %xmm2, %xmm2, %xmm2
; X64-AVX-NEXT:  .LBB3_6:
; X64-AVX-NEXT:    vinsertps {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[2,3]
; X64-AVX-NEXT:    vextractf128 $1, %ymm0, %xmm0
; X64-AVX-NEXT:    vmovq %xmm0, %rax
; X64-AVX-NEXT:    testq %rax, %rax
; X64-AVX-NEXT:    js .LBB3_7
; X64-AVX-NEXT:  # BB#8:
; X64-AVX-NEXT:    vcvtsi2ssq %rax, %xmm3, %xmm2
; X64-AVX-NEXT:    jmp .LBB3_9
; X64-AVX-NEXT:  .LBB3_7:
; X64-AVX-NEXT:    movq %rax, %rcx
; X64-AVX-NEXT:    shrq %rcx
; X64-AVX-NEXT:    andl $1, %eax
; X64-AVX-NEXT:    orq %rcx, %rax
; X64-AVX-NEXT:    vcvtsi2ssq %rax, %xmm3, %xmm2
; X64-AVX-NEXT:    vaddss %xmm2, %xmm2, %xmm2
; X64-AVX-NEXT:  .LBB3_9:
; X64-AVX-NEXT:    vinsertps {{.*#+}} xmm1 = xmm1[0,1],xmm2[0],xmm1[3]
; X64-AVX-NEXT:    vpextrq $1, %xmm0, %rax
; X64-AVX-NEXT:    testq %rax, %rax
; X64-AVX-NEXT:    js .LBB3_10
; X64-AVX-NEXT:  # BB#11:
; X64-AVX-NEXT:    vcvtsi2ssq %rax, %xmm3, %xmm0
; X64-AVX-NEXT:    vinsertps {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[0]
; X64-AVX-NEXT:    vzeroupper
; X64-AVX-NEXT:    retq
; X64-AVX-NEXT:  .LBB3_10:
; X64-AVX-NEXT:    movq %rax, %rcx
; X64-AVX-NEXT:    shrq %rcx
; X64-AVX-NEXT:    andl $1, %eax
; X64-AVX-NEXT:    orq %rcx, %rax
; X64-AVX-NEXT:    vcvtsi2ssq %rax, %xmm3, %xmm0
; X64-AVX-NEXT:    vaddss %xmm0, %xmm0, %xmm0
; X64-AVX-NEXT:    vinsertps {{.*#+}} xmm0 = xmm1[0,1,2],xmm0[0]
; X64-AVX-NEXT:    vzeroupper
; X64-AVX-NEXT:    retq
  %and = and <4 x i64> %a, <i64 127, i64 255, i64 4095, i64 65535>
  %cvt = uitofp <4 x i64> %and to <4 x float>
  ret <4 x float> %cvt
}

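; Each i64 lane is clamped to the range [-255, 255] via compare/select before
; the signed conversion to <2 x double>.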
define <2 x double> @clamp_sitofp_2i64_2f64(<2 x i64> %a) nounwind {
; X32-SSE-LABEL: clamp_sitofp_2i64_2f64:
; X32-SSE:       # BB#0:
; X32-SSE-NEXT:    pushl %ebp
; X32-SSE-NEXT:    movl %esp, %ebp
; X32-SSE-NEXT:    andl $-8, %esp
; X32-SSE-NEXT:    subl $32, %esp
; X32-SSE-NEXT:    movdqa {{.*#+}} xmm1 = [2147483648,0,2147483648,0]
; X32-SSE-NEXT:    movdqa %xmm0, %xmm2
; X32-SSE-NEXT:    pxor %xmm1, %xmm2
; X32-SSE-NEXT:    movdqa {{.*#+}} xmm3 = [2147483393,4294967295,2147483393,4294967295]
; X32-SSE-NEXT:    movdqa %xmm3, %xmm4
; X32-SSE-NEXT:    pcmpgtd %xmm2, %xmm4
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
; X32-SSE-NEXT:    pcmpeqd %xmm3, %xmm2
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
; X32-SSE-NEXT:    pand %xmm5, %xmm2
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
; X32-SSE-NEXT:    por %xmm2, %xmm3
; X32-SSE-NEXT:    movdqa %xmm3, %xmm2
; X32-SSE-NEXT:    pandn %xmm0, %xmm2
; X32-SSE-NEXT:    pand {{\.LCPI.*}}, %xmm3
; X32-SSE-NEXT:    por %xmm2, %xmm3
; X32-SSE-NEXT:    pxor %xmm3, %xmm1
; X32-SSE-NEXT:    movdqa {{.*#+}} xmm0 = [2147483903,0,2147483903,0]
; X32-SSE-NEXT:    movdqa %xmm1, %xmm2
; X32-SSE-NEXT:    pcmpgtd %xmm0, %xmm2
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm4 = xmm2[0,0,2,2]
; X32-SSE-NEXT:    pcmpeqd %xmm0, %xmm1
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
; X32-SSE-NEXT:    pand %xmm4, %xmm0
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
; X32-SSE-NEXT:    por %xmm0, %xmm1
; X32-SSE-NEXT:    movdqa %xmm1, %xmm0
; X32-SSE-NEXT:    pandn %xmm3, %xmm0
; X32-SSE-NEXT:    pand {{\.LCPI.*}}, %xmm1
; X32-SSE-NEXT:    por %xmm0, %xmm1
; X32-SSE-NEXT:    movq {{.*#+}} xmm0 = xmm1[0],zero
; X32-SSE-NEXT:    movq %xmm0, {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm1[2,3,0,1]
; X32-SSE-NEXT:    movq %xmm0, {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fstpl {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-SSE-NEXT:    fstpl (%esp)
; X32-SSE-NEXT:    movsd {{.*#+}} xmm0 = mem[0],zero
; X32-SSE-NEXT:    movhpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; X32-SSE-NEXT:    movl %ebp, %esp
; X32-SSE-NEXT:    popl %ebp
; X32-SSE-NEXT:    retl
;
; X32-AVX-LABEL: clamp_sitofp_2i64_2f64:
; X32-AVX:       # BB#0:
; X32-AVX-NEXT:    pushl %ebp
; X32-AVX-NEXT:    movl %esp, %ebp
; X32-AVX-NEXT:    andl $-8, %esp
; X32-AVX-NEXT:    subl $32, %esp
; X32-AVX-NEXT:    vmovdqa {{.*#+}} xmm1 = [4294967041,4294967295,4294967041,4294967295]
; X32-AVX-NEXT:    vpcmpgtq %xmm0, %xmm1, %xmm2
; X32-AVX-NEXT:    vblendvpd %xmm2, %xmm1, %xmm0, %xmm0
; X32-AVX-NEXT:    vmovdqa {{.*#+}} xmm1 = [255,0,255,0]
; X32-AVX-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm2
; X32-AVX-NEXT:    vblendvpd %xmm2, %xmm1, %xmm0, %xmm0
; X32-AVX-NEXT:    vmovq {{.*#+}} xmm1 = xmm0[0],zero
; X32-AVX-NEXT:    vmovq %xmm1, {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; X32-AVX-NEXT:    vmovq %xmm0, {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fstpl {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fildll {{[0-9]+}}(%esp)
; X32-AVX-NEXT:    fstpl (%esp)
; X32-AVX-NEXT:    vmovsd {{.*#+}} xmm0 = mem[0],zero
; X32-AVX-NEXT:    vmovhpd {{.*#+}} xmm0 = xmm0[0],mem[0]
; X32-AVX-NEXT:    movl %ebp, %esp
; X32-AVX-NEXT:    popl %ebp
; X32-AVX-NEXT:    retl
;
; X64-SSE-LABEL: clamp_sitofp_2i64_2f64:
; X64-SSE:       # BB#0:
; X64-SSE-NEXT:    movdqa {{.*#+}} xmm1 = [2147483648,0,2147483648,0]
; X64-SSE-NEXT:    movdqa %xmm0, %xmm2
; X64-SSE-NEXT:    pxor %xmm1, %xmm2
; X64-SSE-NEXT:    movdqa {{.*#+}} xmm3 = [18446744073709551361,18446744073709551361]
; X64-SSE-NEXT:    movdqa %xmm1, %xmm4
; X64-SSE-NEXT:    pxor %xmm3, %xmm4
; X64-SSE-NEXT:    movdqa %xmm4, %xmm5
; X64-SSE-NEXT:    pcmpgtd %xmm2, %xmm5
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm6 = xmm5[0,0,2,2]
; X64-SSE-NEXT:    pcmpeqd %xmm2, %xmm4
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm2 = xmm4[1,1,3,3]
; X64-SSE-NEXT:    pand %xmm6, %xmm2
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm4 = xmm5[1,1,3,3]
; X64-SSE-NEXT:    por %xmm2, %xmm4
; X64-SSE-NEXT:    movdqa %xmm4, %xmm2
; X64-SSE-NEXT:    pandn %xmm0, %xmm2
; X64-SSE-NEXT:    pand %xmm3, %xmm4
; X64-SSE-NEXT:    por %xmm2, %xmm4
; X64-SSE-NEXT:    movdqa %xmm4, %xmm0
; X64-SSE-NEXT:    pxor %xmm1, %xmm0
; X64-SSE-NEXT:    movdqa {{.*#+}} xmm2 = [255,255]
; X64-SSE-NEXT:    pxor %xmm2, %xmm1
; X64-SSE-NEXT:    movdqa %xmm0, %xmm3
; X64-SSE-NEXT:    pcmpgtd %xmm1, %xmm3
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm5 = xmm3[0,0,2,2]
; X64-SSE-NEXT:    pcmpeqd %xmm0, %xmm1
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
; X64-SSE-NEXT:    pand %xmm5, %xmm0
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm1 = xmm3[1,1,3,3]
; X64-SSE-NEXT:    por %xmm0, %xmm1
; X64-SSE-NEXT:    movdqa %xmm1, %xmm0
; X64-SSE-NEXT:    pandn %xmm4, %xmm0
; X64-SSE-NEXT:    pand %xmm2, %xmm1
; X64-SSE-NEXT:    por %xmm0, %xmm1
; X64-SSE-NEXT:    movd %xmm1, %rax
; X64-SSE-NEXT:    xorps %xmm0, %xmm0
; X64-SSE-NEXT:    cvtsi2sdq %rax, %xmm0
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm1 = xmm1[2,3,0,1]
; X64-SSE-NEXT:    movd %xmm1, %rax
; X64-SSE-NEXT:    xorps %xmm1, %xmm1
; X64-SSE-NEXT:    cvtsi2sdq %rax, %xmm1
; X64-SSE-NEXT:    unpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; X64-SSE-NEXT:    retq
;
; X64-AVX-LABEL: clamp_sitofp_2i64_2f64:
; X64-AVX:       # BB#0:
; X64-AVX-NEXT:    vmovdqa {{.*#+}} xmm1 = [18446744073709551361,18446744073709551361]
; X64-AVX-NEXT:    vpcmpgtq %xmm0, %xmm1, %xmm2
; X64-AVX-NEXT:    vblendvpd %xmm2, %xmm1, %xmm0, %xmm0
; X64-AVX-NEXT:    vmovdqa {{.*#+}} xmm1 = [255,255]
; X64-AVX-NEXT:    vpcmpgtq %xmm1, %xmm0, %xmm2
; X64-AVX-NEXT:    vblendvpd %xmm2, %xmm1, %xmm0, %xmm0
; X64-AVX-NEXT:    vpextrq $1, %xmm0, %rax
; X64-AVX-NEXT:    vcvtsi2sdq %rax, %xmm3, %xmm1
; X64-AVX-NEXT:    vmovq %xmm0, %rax
; X64-AVX-NEXT:    vcvtsi2sdq %rax, %xmm3, %xmm0
; X64-AVX-NEXT:    vunpcklpd {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; X64-AVX-NEXT:    retq
  %clo = icmp slt <2 x i64> %a, <i64 -255, i64 -255>
  %lo = select <2 x i1> %clo, <2 x i64> <i64 -255, i64 -255>, <2 x i64> %a
  %chi = icmp sgt <2 x i64> %lo, <i64 255, i64 255>
  %hi = select <2 x i1> %chi, <2 x i64> <i64 255, i64 255>, <2 x i64> %lo
  %cvt = sitofp <2 x i64> %hi to <2 x double>
  ret <2 x double> %cvt
}