1 #ifndef _ASM_IA64_DELAY_H
2 #define _ASM_IA64_DELAY_H
/*
 * Delay routines using a pre-computed "cycles/usec" value.
 *
 * Copyright (C) 1998, 1999 Hewlett-Packard Co
 * Copyright (C) 1998, 1999 David Mosberger-Tang <davidm@hpl.hp.com>
 * Copyright (C) 1999 VA Linux Systems
 * Copyright (C) 1999 Walt Drummond <drummond@valinux.com>
 * Copyright (C) 1999 Asit Mallick <asit.k.mallick@intel.com>
 * Copyright (C) 1999 Don Dugger <don.dugger@intel.com>
 */
15 #include <linux/config.h>
16 #include <linux/kernel.h>
17 #include <linux/sched.h>
19 #include <asm/processor.h>
/*
 * Write VAL into the interval timer match register (cr.itm), then
 * data-serialize (srlz.d) so the new match value is in effect before
 * any subsequent memory reference.
 */
static __inline__ void
ia64_set_itm (unsigned long val)
{
	__asm__ __volatile__("mov cr.itm=%0;; srlz.d;;" :: "r"(val) : "memory");
}
/*
 * Read the current value of the interval timer match register (cr.itm).
 * The srlz.d after the move data-serializes the read.
 */
static __inline__ unsigned long
ia64_get_itm (void)
{
	unsigned long result;

	__asm__ __volatile__("mov %0=cr.itm;; srlz.d;;" : "=r"(result) :: "memory");
	return result;
}
/*
 * Write VAL into the interval timer vector register (cr.itv), then
 * data-serialize so the new vector/mask setting takes effect before
 * any subsequent memory reference.
 */
static __inline__ void
ia64_set_itv (unsigned long val)
{
	__asm__ __volatile__("mov cr.itv=%0;; srlz.d;;" :: "r"(val) : "memory");
}
/*
 * Write VAL into the interval time counter (ar.itc), then
 * data-serialize.  This (re)sets the free-running cycle counter that
 * ia64_get_itc() and udelay() read.
 */
static __inline__ void
ia64_set_itc (unsigned long val)
{
	__asm__ __volatile__("mov ar.itc=%0;; srlz.d;;" :: "r"(val) : "memory");
}
48 static __inline__ unsigned long
53 __asm__ __volatile__("mov %0=ar.itc" : "=r"(result) :: "memory");
55 while (__builtin_expect ((__s32) result == -1, 0))
56 __asm__ __volatile__("mov %0=ar.itc" : "=r"(result) :: "memory");
/*
 * Spin for LOOPS iterations using the hardware loop-count register
 * (ar.lc) and a counted-loop branch.  The caller's ar.lc value is
 * saved around the spin and restored afterwards.
 */
static __inline__ void
__delay (unsigned long loops)
{
	unsigned long saved_ar_lc;

	/*
	 * Guard against loops == 0: "loops - 1" below would wrap to
	 * ULONG_MAX and the cloop branch would spin ~2^64 times.
	 */
	if (loops < 1)
		return;

	__asm__ __volatile__("mov %0=ar.lc;;" : "=r"(saved_ar_lc));
	__asm__ __volatile__("mov ar.lc=%0;;" :: "r"(loops - 1));
	/* br.cloop decrements ar.lc and branches while it is non-zero */
	__asm__ __volatile__("1:\tbr.cloop.sptk.few 1b;;");
	__asm__ __volatile__("mov ar.lc=%0" :: "r"(saved_ar_lc));
}
75 static __inline__ void
76 udelay (unsigned long usecs)
78 unsigned long start = ia64_get_itc();
79 unsigned long cycles = usecs*local_cpu_data->cyc_per_usec;
81 while (ia64_get_itc() - start < cycles)
85 #endif /* _ASM_IA64_DELAY_H */