OpenCores
URL https://opencores.org/ocsvn/or1k_old/or1k_old/trunk

Subversion Repositories or1k_old

[/] [or1k_old/] [trunk/] [rc203soc/] [sw/] [uClinux/] [include/] [asm-m68knommu/] [system.h] - Diff between revs 1765 and 1782

Only display areas with differences | Details | Blame | View Log

Rev 1765 Rev 1782
#ifndef _M68K_SYSTEM_H
#ifndef _M68K_SYSTEM_H
#define _M68K_SYSTEM_H
#define _M68K_SYSTEM_H
 
 
#include <linux/config.h> /* get configuration macros */
#include <linux/config.h> /* get configuration macros */
#include <linux/linkage.h>
#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/segment.h>
 
 
#ifdef CONFIG_M68328
#ifdef CONFIG_M68328
#include <asm/MC68328.h>
#include <asm/MC68328.h>
#endif
#endif
 
 
#ifdef CONFIG_M68EZ328
#ifdef CONFIG_M68EZ328
#include <asm/MC68EZ328.h>
#include <asm/MC68EZ328.h>
#endif
#endif
 
 
/*
 * Read the user stack pointer (USP).
 *
 * On ColdFire the user stack pointer is kept in the software variable
 * sw_usp (presumably maintained by the exception entry/exit code --
 * confirm against the ColdFire entry code); on other 68k parts it is
 * read directly from the hardware %usp register.
 */
extern inline unsigned long rdusp(void) {
#ifdef CONFIG_COLDFIRE
        extern unsigned int     sw_usp;
        return(sw_usp);
#else
        unsigned long usp;
        __asm__ __volatile__("move %/usp,%0"
                             : "=a" (usp));
        return usp;
#endif
}
 
 
/*
 * Write the user stack pointer (USP).
 *
 * Mirror of rdusp(): on ColdFire the value is stored in the software
 * copy sw_usp; elsewhere it is moved straight into the %usp register.
 */
extern inline void wrusp(unsigned long usp) {
#ifdef CONFIG_COLDFIRE
        extern unsigned int     sw_usp;
        sw_usp = usp;
#else
        __asm__ __volatile__("move %0,%/usp"
                             :
                             : "a" (usp));
#endif
}
 
 
/* Read the current value of address register %a5. */
extern inline unsigned long rda5(void) {
        unsigned long a5;

        __asm__ __volatile__("movel %/a5,%0"
                             : "=a" (a5));
        return a5;
}
 
 
/* Write a value into address register %a5. */
extern inline void wra5(unsigned long a5) {
        __asm__ __volatile__("movel %0,%/a5"
                             :
                             : "a" (a5));
}
 
 
/*
 * switch_to(n) should switch tasks to task ptr, first checking that
 * ptr isn't the current task, in which case it does nothing.  This
 * also clears the TS-flag if the task we switched to has used the
 * math co-processor latest.
 */
/*
 * switch_to() saves the extra registers that are not saved
 * automatically by SAVE_SWITCH_STACK in resume(), i.e. d0-d5 and
 * a0-a1.  Some of these are used by schedule() and its predecessors
 * and so we might see unexpected behaviors when a task returns
 * with unexpected register values.
 *
 * syscall stores these registers itself and none of them are used
 * by syscall after the function in the syscall has been called.
 *
 * Beware that resume now expects *next to be in d1 and the offset of
 * tss to be in a1.  This saves a few instructions as we no longer have
 * to push them onto the stack and read them back right after.
 *
 * 02/17/96 - Jes Sorensen (jds@kom.auc.dk)
 *
 * Changed 96/09/19 by Andreas Schwab
 * pass prev in a0, next in a1, offset of tss in d1, and whether
 * the mm structures are shared in d2 (to avoid atc flushing).
 */
asmlinkage void resume(void);

/*
 * Context-switch from 'prev' to 'next' by calling resume().
 *
 * Register calling convention: prev in a0, next in a1, the byte offset
 * of the tss field inside struct task_struct in d1, and a flag in d2
 * telling whether the two tasks share the same mm (so resume can skip
 * the ATC flush).  d0-d5 and a0-a1 are declared clobbered because
 * resume() does not preserve them.
 */
#define switch_to(prev,next) { \
  register void *_prev __asm__ ("a0") = (prev); \
  register void *_next __asm__ ("a1") = (next); \
  register int _tssoff __asm__ ("d1") = (int)&((struct task_struct *)0)->tss; \
  register char _shared __asm__ ("d2") = ((prev)->mm == (next)->mm); \
  __asm__ __volatile__("jbsr " SYMBOL_NAME_STR(resume) "\n\t" \
                       : : "a" (_prev), "a" (_next), "d" (_tssoff), \
                           "d" (_shared) \
                       : "d0", "d1", "d2", "d3", "d4", "d5", "a0", "a1"); \
}
 
 
/*
 * xchg(ptr,x): atomically exchange *ptr with x and return the old
 * value.  Dispatches on sizeof(*ptr) to the __xchg() helper below.
 */
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
/* Test-and-set: store 1 into *ptr and return the previous value. */
#define tas(ptr) (xchg((ptr),1))

/*
 * Oversized dummy type so that "m" (*__xg(ptr)) constraints tell the
 * compiler the asm may touch the whole object at ptr, not just a
 * single word, preventing unsafe caching around the exchange.
 */
struct __xchg_dummy { unsigned long a[100]; };
#define __xg(x) ((volatile struct __xchg_dummy *)(x))
 
 
#ifdef CONFIG_COLDFIRE
/*
 * sti(): enable interrupts by clearing the interrupt-priority mask
 * (bits 8-10) of the status register.  On ColdFire the SR is staged
 * through %d0 instead of being modified with an immediate-to-SR form.
 */
#define sti() __asm__ __volatile__ ( \
        "move %/sr,%%d0\n\t" \
        "andi.l #0xf8ff,%%d0\n\t" \
        "move %%d0,%/sr\n" \
        : /* no outputs */ \
        : \
        : "%d0", "memory")
/* cli(): disable interrupts by raising the interrupt-priority mask to 7. */
#define cli() __asm__ __volatile__ ( \
        "move %/sr,%%d0\n\t" \
        "ori.l  #0x0700,%%d0\n\t" \
        "move %%d0,%/sr\n" \
        : /* no outputs */ \
        : \
        : "%d0", "memory")
#else
#if defined(CONFIG_ATARI) && !defined(CONFIG_AMIGA) && !defined(CONFIG_MAC)
/* block out HSYNC on the atari */
#define sti() __asm__ __volatile__ ("andiw #0xfbff,%/sr": : : "memory")
#else /* portable version */
#define sti() __asm__ __volatile__ ("andiw #0xf8ff,%/sr": : : "memory")
#endif /* machine compilation types */
#define cli() __asm__ __volatile__ ("oriw  #0x0700,%/sr": : : "memory")
#endif
 
 
/* nop(): emit a single no-operation instruction. */
#define nop() __asm__ __volatile__ ("nop"::)
/* mb(): compiler-only memory barrier -- no instruction is emitted. */
#define mb()  __asm__ __volatile__ (""   : : :"memory")

/* save_flags(x): capture the status register (interrupt state) into x. */
#define save_flags(x) \
__asm__ __volatile__("movew %/sr,%0":"=d" (x) : /* no input */ :"memory")

/* restore_flags(x): restore a status register value saved by save_flags(). */
#define restore_flags(x) \
__asm__ __volatile__("movew %0,%/sr": /* no outputs */ :"d" (x) : "memory")

/* iret(): return from exception via rte. */
#define iret() __asm__ __volatile__ ("rte": : :"memory", "sp", "cc")
 
 
#ifndef CONFIG_RMW_INSNS
#ifndef CONFIG_RMW_INSNS
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
{
  unsigned long tmp, flags;
  unsigned long tmp, flags;
 
 
  save_flags(flags);
  save_flags(flags);
  cli();
  cli();
 
 
  switch (size) {
  switch (size) {
  case 1:
  case 1:
    __asm__ __volatile__
    __asm__ __volatile__
    ("moveb %2,%0\n\t"
    ("moveb %2,%0\n\t"
     "moveb %1,%2"
     "moveb %1,%2"
    : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
    : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
    break;
    break;
  case 2:
  case 2:
    __asm__ __volatile__
    __asm__ __volatile__
    ("movew %2,%0\n\t"
    ("movew %2,%0\n\t"
     "movew %1,%2"
     "movew %1,%2"
    : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
    : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
    break;
    break;
  case 4:
  case 4:
    __asm__ __volatile__
    __asm__ __volatile__
    ("movel %2,%0\n\t"
    ("movel %2,%0\n\t"
     "movel %1,%2"
     "movel %1,%2"
    : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
    : "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
    break;
    break;
  }
  }
  restore_flags(flags);
  restore_flags(flags);
  return tmp;
  return tmp;
}
}
#else
#else
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
{
        switch (size) {
        switch (size) {
            case 1:
            case 1:
                __asm__ __volatile__
                __asm__ __volatile__
                        ("moveb %2,%0\n\t"
                        ("moveb %2,%0\n\t"
                         "1:\n\t"
                         "1:\n\t"
                         "casb %0,%1,%2\n\t"
                         "casb %0,%1,%2\n\t"
                         "jne 1b"
                         "jne 1b"
                         : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
                         : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
                break;
                break;
            case 2:
            case 2:
                __asm__ __volatile__
                __asm__ __volatile__
                        ("movew %2,%0\n\t"
                        ("movew %2,%0\n\t"
                         "1:\n\t"
                         "1:\n\t"
                         "casw %0,%1,%2\n\t"
                         "casw %0,%1,%2\n\t"
                         "jne 1b"
                         "jne 1b"
                         : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
                         : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
                break;
                break;
            case 4:
            case 4:
                __asm__ __volatile__
                __asm__ __volatile__
                        ("movel %2,%0\n\t"
                        ("movel %2,%0\n\t"
                         "1:\n\t"
                         "1:\n\t"
                         "casl %0,%1,%2\n\t"
                         "casl %0,%1,%2\n\t"
                         "jne 1b"
                         "jne 1b"
                         : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
                         : "=&d" (x) : "d" (x), "m" (*__xg(ptr)) : "memory");
                break;
                break;
        }
        }
        return x;
        return x;
}
}
#endif
#endif
 
 
#ifdef CONFIG_M68332
/*
 * Hard-reset the board: disable interrupts, issue a reset instruction,
 * clear the vector base register, then restart from the reset vectors
 * (initial SP at address 0, initial PC at address 4).
 * NOTE(review): 0xfffa6a looks like a SIM/watchdog control register --
 * confirm against the MC68332 user's manual.
 */
#define HARD_RESET_NOW() ({             \
        cli();                          \
        asm("                           \
        movew   #0x0000, 0xfffa6a;      \
        reset;                          \
        /*movew #0x1557, 0xfffa44;*/    \
        /*movew #0x0155, 0xfffa46;*/    \
        moveal #0, %a0;                 \
        movec %a0, %vbr;                \
        moveal 0, %sp;                  \
        moveal 4, %a0;                  \
        jmp (%a0);                      \
        ");                             \
})
#endif
 
 
#if defined( CONFIG_M68328 ) || defined( CONFIG_M68EZ328 )
/*
 * Hard-reset the board: disable interrupts, then reload SP and PC from
 * the vector table at 0x10c00000 and jump there.
 * NOTE(review): 0x10c00000 is presumably the boot image base, and the
 * byte write to 0xFFFFF300 a system/interrupt control register --
 * confirm against the MC68(EZ)328 manual.
 */
#define HARD_RESET_NOW() ({             \
        cli();                          \
        asm("                           \
        moveal #0x10c00000, %a0;        \
        moveb #0, 0xFFFFF300;           \
        moveal 0(%a0), %sp;             \
        moveal 4(%a0), %a0;             \
        jmp (%a0);                      \
        ");                             \
})
#endif
 
 
#ifdef CONFIG_COLDFIRE
#if defined(CONFIG_NETtel) || defined(CONFIG_eLIA) || defined(CONFIG_MATtel)
/*
 * Hard-reset for NETtel/eLIA/MATtel boards: mask all interrupts
 * (SR = 0x2700), poke two board registers, then jump to the reset
 * entry point fetched from 0xf0000004.
 * NOTE(review): 0x10000044/0x10000001 are board-specific latch/LED or
 * control registers -- confirm against the board support code.
 */
#define HARD_RESET_NOW() ({             \
        asm("                           \
        movew #0x2700, %sr;             \
        moveal #0x10000044, %a0;        \
        movel #0xffffffff, (%a0);       \
        moveal #0x10000001, %a0;        \
        moveb #0x00, (%a0);             \
        moveal #0xf0000004, %a0;        \
        moveal (%a0), %a0;              \
        jmp (%a0);                      \
        ");                             \
})
#else
/*
 * Generic ColdFire hard-reset: mask all interrupts, then jump to the
 * reset PC fetched from the vector table entry at address 4.
 */
#define HARD_RESET_NOW() ({             \
        asm("                           \
        movew #0x2700, %sr;             \
        moveal #0x4, %a0;               \
        moveal (%a0), %a0;              \
        jmp (%a0);                      \
        ");                             \
})
#endif
#endif
 
 
#endif /* _M68K_SYSTEM_H */
#endif /* _M68K_SYSTEM_H */
 
 

powered by: WebSVN 2.1.0

© copyright 1999-2024 OpenCores.org, equivalent to Oliscience, all rights reserved. OpenCores®, registered trademark.