/*****************************************************************************
    vectors.S -- mcf52xx exception vectors
*****************************************************************************/
#####ECOSGPLCOPYRIGHTBEGIN####
## -------------------------------------------
## This file is part of eCos, the Embedded Configurable Operating System.
## Copyright (C) 1998, 1999, 2000, 2001, 2002 Red Hat, Inc.
##
## eCos is free software; you can redistribute it and/or modify it under
## the terms of the GNU General Public License as published by the Free
## Software Foundation; either version 2 or (at your option) any later version.
##
## eCos is distributed in the hope that it will be useful, but WITHOUT ANY
## WARRANTY; without even the implied warranty of MERCHANTABILITY or
## FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
## for more details.
##
## You should have received a copy of the GNU General Public License along
## with eCos; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
##
## As a special exception, if other files instantiate templates or use macros
## or inline functions from this file, or you compile this file and link it
## with other works to produce a work based on this file, this file does not
## by itself cause the resulting work to be covered by the GNU General Public
## License. However the source code for this file must still be made available
## in accordance with section (3) of the GNU General Public License.
##
## This exception does not invalidate any other reasons why a work based on
## this file might be covered by the GNU General Public License.
##
## Alternative licenses for eCos may be arranged by contacting Red Hat, Inc.
## at http://sources.redhat.com/ecos/ecos-license/
## -------------------------------------------
#####ECOSGPLCOPYRIGHTEND####
##=============================================================================

#include
#include

#ifdef CYGPKG_KERNEL
#include <pkgconf/kernel.h>
#endif /* CYGPKG_KERNEL */

/****************************************************************************/

        .file   "vectors.S"

/****************************************************************************/

/****************************************************************************/

/* The mcf52xx core allows us to move the VBR to our RAM vector table */
/* at cyg_hal_vsr_table provided the table is at a 1MB boundary. */
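
/* Illustration (not taken from this file): the VBR itself is loaded */
/* elsewhere, typically by HAL startup code, with a movec instruction */
/* along the lines of: */
/*      move.l  #cyg_hal_vsr_table,%d0 */
/*      movec   %d0,%vbr */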

/****************************************************************************/
/* ROM vector table */

/* Create the ROM vector table. We use this table to initialize */
/* cyg_hal_vsr_table which we point the VBR to. */

        .macro  hw_vsr name
        .long   hw_vsr_\name
        .endm

        .section ".romvec","ax"

        .globl  rom_vsr_table
rom_vsr_table:

        /* 0 - Initial SSP */
        hw_vsr  stack

        /* 1 - Initial PC */
        hw_vsr  reset

        /* 2-24 - Default exception handlers */
        .rept   24-2+1
        hw_vsr  default
        .endr

        /* 25-31 - Autovector interrupts 1-7 */
        .rept   31-25+1
        hw_vsr  autovec
        .endr

        /* 32-63 - Default exception handlers */
        .rept   63-32+1
        hw_vsr  default
        .endr

        /* 64-255 - User interrupt vectors */
        .rept   255-64+1
        hw_vsr  interrupt
        .endr

        .equ    rom_vsr_table_size, . - rom_vsr_table
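
/* Each hw_vsr entry is one longword, so the table above covers vectors */
/* 0-255 and rom_vsr_table_size evaluates to 256*4 = 1024 bytes. */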

/****************************************************************************/
/* The default exception vector handler */

/* The default handler for all machine exceptions. We save the */
/* machine state and call the default C VSR handler. This routine passes a */
/* pointer to the saved state to the C VSR handler. The stack pointer in */
/* the saved state points to the byte following the PC on the exception */
/* stack (the sp before the exception). The format/vector word in the */
/* exception stack contains the vector number. */

/*
void hal_default_exception_handler(CYG_WORD vector, HAL_SavedRegisters *regs);
*/

        .text
        .balign 4
hw_vsr_default:

        lea.l   -(16*4)(%sp),%sp        /* Preserve the entire state. */
        movem.l %d0-%d7/%a0-%a6,(%sp)   /* Allocate space for all */
        lea.l   ((16+1+1)*4)(%sp),%a0   /* registers (including the */
        move.l  %a0,(15*4)(%sp)         /* stack pointer). Write all */
                                        /* registers to the stack */
                                        /* space. Write the original */
                                        /* stack pointer value to the */
                                        /* stack. The format/vector */
                                        /* word, sr, and pc are already */
                                        /* on the stack. */
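                                        /* Resulting layout (offsets */
                                        /* from %sp): 0-14*4 d0-d7/a0-a6, */
                                        /* 15*4 the pre-exception SP, */
                                        /* 16*4 format/vector word + SR, */
                                        /* 17*4 PC. */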

        move.w  (16*4)(%sp),%d0         /* Calculate the vector */
        and.l   #0x000003fc,%d0         /* number. The format/vector */
        lsr.l   #2,%d0                  /* word on the stack contains */
                                        /* the vector number. */
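                                        /* The upper word of the first */
                                        /* longword of the ColdFire */
                                        /* exception frame holds the */
                                        /* vector number in bits 9:2, */
                                        /* hence the 0x3fc mask and the */
                                        /* shift right by two. */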

        pea.l   (%sp)                   /* Pass a pointer to the */
                                        /* saved state to the exception */
                                        /* handler. */

        move.l  %d0,-(%sp)              /* Push the vector number */
                                        /* parameter. */

        .extern hal_default_exception_handler /* Call the default */
        jbsr    hal_default_exception_handler /* exception VSR. This */
                                        /* routine may (and */
                                        /* probably will) modify */
                                        /* the exception context. */

        addq.l  #2*4,%sp                /* Remove the vector number */
                                        /* and the state pointer from */
                                        /* the stack. */

                                        /* Restore the state. There */
                                        /* is a chance that a debugger */
                                        /* changed the state (including */
                                        /* the stack pointer, PC, */
                                        /* etc.). We must be very */
                                        /* careful to restore the new */
                                        /* state without first */
                                        /* overwriting the values on */
                                        /* the stack. We must copy the */
                                        /* format/vector word, SR, and */
                                        /* PC to the new stack, but we */
                                        /* must make sure that the new */
                                        /* stack is not in the middle */
                                        /* of our current stack */
                                        /* variables that we are using. */

        movem.l (%sp),%d0-%d7/%a0-%a4   /* Restore all of the */
                                        /* registers that we do not */
                                        /* need in the following code. */
                                        /* We will copy all registers */
                                        /* that are not restored here */
                                        /* to the new stack before */
                                        /* restoring them. */

        move.l  (15*4)(%sp),%a6         /* Load the address of the */
                                        /* new SP. */

        lea.l   (18*4)(%sp),%a5         /* Get a pointer to the */
                                        /* location following the */
                                        /* exception context. */

        cmp.l   %a5,%a6                 /* Compare the new stack */
        jcc     1f /*jcc=jhs*/          /* address to the end of the */
                                        /* exception context. This */
                                        /* will tell us the order that */
                                        /* we need to copy the */
                                        /* exception stack and the */
                                        /* remaining registers from the */
                                        /* exception context to the new */
                                        /* stack. The order is */
                                        /* important because the stack */
                                        /* frames might (and in many */
                                        /* cases do) overlap. */

                                        /* The new SP is at a lower */
                                        /* address than the end of the */
                                        /* exception context. Copy */
                                        /* from the lowest address to */
                                        /* the highest address. */

        lea.l   -5*4(%a6),%a6           /* Copy A5, A6, FVW, SR, and */
        move.l  -5*4(%a5),(%a6)         /* PC from the old stack to the */
        move.l  -4*4(%a5),1*4(%a6)      /* new stack. Note that we do */
        move.l  -2*4(%a5),3*4(%a6)      /* not copy the SP location but */
        move.l  -1*4(%a5),4*4(%a6)      /* we leave a space for it on */
                                        /* the new stack. If we do not */
                                        /* leave space for the SP, */
                                        /* there is a possibility of */
                                        /* overwriting some of our */
                                        /* data. Note that we copy in */
                                        /* increasing order. */

        move.l  %a6,%sp                 /* A6 points to the top of */
        move.l  (%sp)+,%a5              /* the new stack with our */
        move.l  (%sp)+,%a6              /* registers on it. Restore */
        addq.l  #1*4,%sp                /* the remaining registers and */
        rte                             /* use the exception stack to */
                                        /* return. Note that we also */
                                        /* remove the unused space left */
                                        /* for the SP. */

1:

        move.l  -(%a5),-(%a6)           /* The new SP is at a higher */
        move.l  -(%a5),-(%a6)           /* or the same address as the */
        subq.l  #4,%a5                  /* end of the exception */
        move.l  -(%a5),-(%a6)           /* context. Copy from the */
        move.l  -(%a5),-(%a6)           /* highest address to the */
                                        /* lowest address. Note that */
                                        /* we do not copy the stack */
                                        /* pointer. When copying in */
                                        /* this direction, there is no */
                                        /* reason to leave space for */
                                        /* the stack pointer. */

        move.l  %a6,%sp                 /* A6 points to the top of */
        move.l  (%sp)+,%a5              /* the new stack with our */
        move.l  (%sp)+,%a6              /* registers on it. Restore */
        rte                             /* the remaining registers and */
                                        /* use the exception stack to */
                                        /* return. */

/****************************************************************************/
/* User interrupt vector handler */

/* Control is transferred here from a user interrupt vector (#64-255). */

/* Before branching to common code, load a value to translate the */
/* vector table offset to the ISR table offset. The ISR vector table */
/* contains the autovectors (0-6) followed by the interrupt vectors */
/* (7-198). */

        .equ    int_pres_regs_sz,((2+3)*4)
        .macro  int_pres_regs
        lea.l   -int_pres_regs_sz(%sp),%sp
        movem.l %d0-%d2/%a0-%a1,(%sp)
        .endm
        .macro  int_rest_regs
        movem.l (%sp),%d0-%d2/%a0-%a1
        lea.l   int_pres_regs_sz(%sp),%sp
        .endm
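
/* int_pres_regs saves the five scratch registers d0-d2/a0-a1, i.e. */
/* int_pres_regs_sz = (2+3)*4 = 20 bytes, so inside the handlers below */
/* the hardware exception frame starts at int_pres_regs_sz(%sp). */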

        .text
        .balign 4
hw_vsr_interrupt:

        int_pres_regs                   /* Preserve all registers */
                                        /* that this ISR routine needs */
                                        /* to preserve. The C code */
                                        /* will preserve all other */
                                        /* registers. */

        move.l  #(-64+7)*4,%d0          /* Adding this value to the */
                                        /* vector table offset will */
                                        /* result in the corresponding */
                                        /* offset into the ISR table. */
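                                        /* For example, user vector 64 */
                                        /* is ISR table entry 7: */
                                        /* 64*4 + (-64+7)*4 = 7*4, and */
                                        /* vector 255 maps to entry 198. */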

        /* Fall through to common code. */

hw_vsr_int_common:                      /* Common code. */

        /* d0.l: Contains a value to translate the vector table offset to */
        /* the ISR table offset. */

        move.w  int_pres_regs_sz(%sp),%d1 /* Calculate the vector */
        and.l   #0x000003fc,%d1         /* offset. The format/vector */
                                        /* word on the stack contains */
                                        /* the vector number. Mask off */
                                        /* all unused bits. The bit */
                                        /* position of the vector */
                                        /* number field makes it */
                                        /* automatically multiplied by */
                                        /* four. */

        add.l   %d1,%d0                 /* Calculate the ISR table */
                                        /* offset. Add the vector */
                                        /* table offset to the */
                                        /* translation value. */

        asr.l   #2,%d1                  /* Calculate the vector */
                                        /* number using the vector */
                                        /* table offset. */

        /* d0.l: Contains the offset into the ISR table. */

        /* d1.l: Contains the vector number. */

#ifdef CYGFUN_HAL_COMMON_KERNEL_SUPPORT

        .extern cyg_scheduler_sched_lock /* Lock the scheduler if */
        addq.l  #1,cyg_scheduler_sched_lock /* we are using the kernel. */

#endif /* CYGFUN_HAL_COMMON_KERNEL_SUPPORT */

        /* We need to call the following routines. The isr address, data, and */
        /* intr are all from the ISR table. interrupt_end is a C routine and is */
        /* only called if we are using the kernel. regs points to the saved */
        /* registers on the stack. isr_ret is the return value from isr. vector */
        /* is the vector number. */

/*
static cyg_uint32 isr(CYG_ADDRWORD vector,
                      CYG_ADDRWORD data)

externC void interrupt_end(cyg_uint32 isr_ret,
                           Cyg_Interrupt *intr,
                           HAL_SavedRegisters *regs)
*/
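
/* Roughly, the code below performs (a paraphrase, not literal code from */
/* this port; i is the ISR table index, i.e. the byte offset in d0 / 4): */
/*                                                                       */
/*     isr_ret = cyg_hal_interrupt_handlers[i](vector,                   */
/*                                             cyg_hal_interrupt_data[i]); */
/*     interrupt_end(isr_ret, cyg_hal_interrupt_objects[i], regs);       */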

        pea     (%sp)                   /* Push the regs pointer. */

        .extern cyg_hal_interrupt_objects /* Push the intr object */
        lea     cyg_hal_interrupt_objects,%a0 /* pointer from the table. */
        move.l  (%a0,%d0.l),-(%sp)

        .extern cyg_hal_interrupt_data  /* Push the data value */
        lea     cyg_hal_interrupt_data,%a0 /* from the table. */
        move.l  (%a0,%d0.l),-(%sp)

        .extern cyg_hal_interrupt_handlers /* Get the address of the */
        lea     cyg_hal_interrupt_handlers,%a0 /* ISR from the table. */
        move.l  (%a0,%d0.l),%a0

        move.l  %d1,-(%sp)              /* Push the vector number */
                                        /* parameter. */

        jbsr    (%a0)                   /* Call the ISR. */

        addq.l  #4*1,%sp                /* Remove the vector */
                                        /* parameter from the stack. */

        move.l  %d0,(%sp)               /* d0.l contains the return */
                                        /* value from the ISR. */
                                        /* Overwrite the data parameter */
                                        /* with the ISR return value to */
                                        /* pass as a parameter */
                                        /* (isr_ret) to interrupt_end. */
                                        /* The intr object and regs */
                                        /* parameters are still on the */
                                        /* stack. */

#ifdef CYGFUN_HAL_COMMON_KERNEL_SUPPORT

        /* We only need to call interrupt_end() when there is a kernel */
        /* present to do any tidying up. */

        /* The interrupt_end routine will call the DSRs and do */
        /* rescheduling when it decrements the scheduler lock from 1 to */
        /* zero. In this case, we do not want to have interrupts masked */
        /* while the DSRs run. Restore the interrupt mask to the value */
        /* prior to this interrupt. Do not completely unmask all */
        /* interrupts because this interrupt may be a nested interrupt. We */
        /* do not want to lower the interrupt mask on the lower priority */
        /* interrupt. */

        move.w  (4*3)+int_pres_regs_sz+2(%sp),%d2
        move.w  %d2,%sr
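                                        /* The word loaded here is the */
                                        /* saved SR: 4*3 skips the */
                                        /* isr_ret, intr, and regs */
                                        /* arguments, int_pres_regs_sz */
                                        /* skips the saved scratch */
                                        /* registers, and +2 selects */
                                        /* the SR half of the */
                                        /* format/vector longword. */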

        /* If the interrupt mask was not previously zero, we want to make */
        /* sure that the DSRs do not run and no preemption occurs. Add the */
        /* value of the previous interrupt mask to the scheduler lock. If */
        /* the previous mask was zero, the scheduler lock will remain at */
        /* one and the interrupt end function will decrement it to zero. */
        /* Otherwise, we want to prevent the interrupt end function from */
        /* unlocking the scheduler. We do this because there is a chance */
        /* that someone had interrupts masked with the scheduler lock at */
        /* zero. If a higher priority interrupt occurs, we could be */
        /* running DSRs and doing preemption with the interrupts masked! */

        and.l   #0x0700,%d2             /* Extract the previous */
        lsr.l   #8,%d2                  /* interrupt priority mask */
        add.l   %d2,cyg_scheduler_sched_lock /* (SR bits 10:8) and add it */
                                        /* to the scheduler lock. */

        .extern interrupt_end           /* Call the interrupt_end C */
        jbsr    interrupt_end           /* routine. This routine might */
                                        /* preempt the currently */
                                        /* running thread. */

        /* Now that interrupt end is complete, subtract the previous */
        /* interrupt level back out of the scheduler lock. */

        sub.l   %d2,cyg_scheduler_sched_lock

#endif

        lea     (4*3)(%sp),%sp          /* Remove the isr_ret, intr, */
                                        /* and regs parameters from the */
                                        /* stack. */

        int_rest_regs                   /* Restore the preserved */
                                        /* registers for the current */
                                        /* thread. */

        rte                             /* Restore the SR and PC. */

/****************************************************************************/
/* Autovector interrupt vector handler. */

/* Control is transferred here from an interrupt autovector (#25-31). */

/* Before branching to common code, load a value to translate the */
/* vector table offset to the ISR table offset. The ISR vector table */
/* contains the autovectors (0-6) followed by the interrupt vectors */
/* (7-198). */

        .text
        .balign 4
hw_vsr_autovec:

        int_pres_regs                   /* Preserve all registers */
                                        /* that this ISR routine needs */
                                        /* to preserve. The C code */
                                        /* will preserve all other */
                                        /* registers. */

        move.l  #(-25+0)*4,%d0          /* Adding this value to the */
                                        /* vector table offset will */
                                        /* result in the corresponding */
                                        /* offset into the ISR table. */
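                                        /* For example, autovector 25 */
                                        /* (level 1) is ISR table entry */
                                        /* 0: 25*4 + (-25+0)*4 = 0, and */
                                        /* autovector 31 (level 7) is */
                                        /* entry 6. */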

        jra     hw_vsr_int_common       /* Branch into common code. */

/****************************************************************************/
/* hw_vsr_reset -- Hardware Reset Vector */

/* We assume that most of the chip selects are configured by the boot */
/* loader. */

        .text
        .balign 4
        .globl  hw_vsr_reset
hw_vsr_reset:

        .globl  __exception_reset       /* Define the entry point for */
__exception_reset:                      /* the linker. */

        move.w  #0x2700,%sr             /* Make sure that all */
                                        /* interrupts are masked. */

        lea     hw_vsr_stack,%sp        /* Load the reset and */
                                        /* interrupt stack pointer. */

        lea     0,%fp                   /* Set up the initial frame */
        link    %fp,#0                  /* pointer. */

        .extern hal_reset               /* Call the C routine to */
        jbsr    hal_reset               /* complete the reset process. */

9:      stop    #0x2000                 /* If we return, stop. */
        jra     9b

/****************************************************************************/
/* Interrupt and reset stack */

/* WARNING: Do not put this in any memory section that gets */
/* initialized. Doing so may cause the C code to initialize its own stack. */

        .section ".uninvar","aw",@nobits

        .balign 16
        .global hw_vsr_stack_bottom
hw_vsr_stack_bottom:
        .skip   0x2000
        .balign 16
        .global hw_vsr_stack
hw_vsr_stack:
        .skip   0x10

/****************************************************************************/