/*
 * Special support for e500 eabi and SVR4
 *
 * Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
 * Written by Nathan Froyd
 *
 * This file is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 3, or (at your option) any
 * later version.
 *
 * This file is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * General Public License for more details.
 *
 * Under Section 7 of GPL version 3, you are granted additional
 * permissions described in the GCC Runtime Library Exception, version
 * 3.1, as published by the Free Software Foundation.
 *
 * You should have received a copy of the GNU General Public License and
 * a copy of the GCC Runtime Library Exception along with this program;
 * see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
 * <http://www.gnu.org/licenses/>.
 */

	.section ".text"
	#include "ppc-asm.h"

#ifdef __SPE__

/* Routines for restoring 64-bit integer registers where the number of
   registers to be restored is passed in CTR, called by the compiler.  */
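
/* Illustrative usage sketch (not part of the original file): as the code
   below shows, entering at _rest64gpr_ctr_N reloads register N from byte
   offset 8*(N-14) off r11 and continues with the following registers until
   CTR reaches zero or r31 has been loaded, then returns via blr.  A
   hypothetical epilogue that saved r27..r31 might therefore do something
   like the following, where FRAME_OFF (the distance from r1 to the notional
   slot for r14) is an assumed, made-up offset; the epilogue itself must
   still restore LR and tear down its own stack frame afterwards:

	li	0,5			# restoring five registers, r27..r31
	mtctr	0			# register count is passed in CTR
	addi	11,1,FRAME_OFF		# r11 -> base of the 64-bit save area
	bl	_rest64gpr_ctr_27	# reloads r27..r31, then returns
*/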

HIDDEN_FUNC(_rest64gpr_ctr_14)	evldd 14,0(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_15)	evldd 15,8(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_16)	evldd 16,16(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_17)	evldd 17,24(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_18)	evldd 18,32(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_19)	evldd 19,40(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_20)	evldd 20,48(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_21)	evldd 21,56(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_22)	evldd 22,64(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_23)	evldd 23,72(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_24)	evldd 24,80(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_25)	evldd 25,88(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_26)	evldd 26,96(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_27)	evldd 27,104(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_28)	evldd 28,112(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_29)	evldd 29,120(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_30)	evldd 30,128(11)
				bdz _rest64gpr_ctr_done
HIDDEN_FUNC(_rest64gpr_ctr_31)	evldd 31,136(11)
_rest64gpr_ctr_done:	blr
FUNC_END(_rest64gpr_ctr_31)
FUNC_END(_rest64gpr_ctr_30)
FUNC_END(_rest64gpr_ctr_29)
FUNC_END(_rest64gpr_ctr_28)
FUNC_END(_rest64gpr_ctr_27)
FUNC_END(_rest64gpr_ctr_26)
FUNC_END(_rest64gpr_ctr_25)
FUNC_END(_rest64gpr_ctr_24)
FUNC_END(_rest64gpr_ctr_23)
FUNC_END(_rest64gpr_ctr_22)
FUNC_END(_rest64gpr_ctr_21)
FUNC_END(_rest64gpr_ctr_20)
FUNC_END(_rest64gpr_ctr_19)
FUNC_END(_rest64gpr_ctr_18)
FUNC_END(_rest64gpr_ctr_17)
FUNC_END(_rest64gpr_ctr_16)
FUNC_END(_rest64gpr_ctr_15)
FUNC_END(_rest64gpr_ctr_14)

#endif