X-Git-Url: http://git.rot13.org/?a=blobdiff_plain;f=include%2Fasm-parisc%2Fsystem.h;h=ee80c920b464e549848664794ac742a9e4714fd9;hb=3b470ac43fcd9848fa65e58e54875ad75be61cec;hp=7e9afa720d4389b31e5a4fc1e1164ec23df5def0;hpb=db7ce76f6b3dfc119ab2f03e5e7784afc9cf2f05;p=powerpc.git

diff --git a/include/asm-parisc/system.h b/include/asm-parisc/system.h
index 7e9afa720d..ee80c920b4 100644
--- a/include/asm-parisc/system.h
+++ b/include/asm-parisc/system.h
@@ -48,17 +48,6 @@ extern struct task_struct *_switch_to(struct task_struct *, struct task_struct *
 	(last) = _switch_to(prev, next);			\
 } while(0)
 
-/*
- * On SMP systems, when the scheduler does migration-cost autodetection,
- * it needs a way to flush as much of the CPU's caches as possible.
- *
- * TODO: fill this in!
- */
-static inline void sched_cacheflush(void)
-{
-}
-
-
 /* interrupt control */
 #define local_save_flags(x)	__asm__ __volatile__("ssm 0, %0" : "=r" (x) : : "memory")
 #define local_irq_disable()	__asm__ __volatile__("rsm %0,%%r0\n" : : "i" (PSW_I) : "memory" )
@@ -188,7 +177,6 @@ static inline void set_eiem(unsigned long val)
 # define __lock_aligned __attribute__((__section__(".data.lock_aligned")))
 #endif
 
-#define KERNEL_START (0x10100000 - 0x1000)
 #define arch_align_stack(x) (x)
 
 #endif
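
Note (not part of the patch): a minimal C sketch of how the PA-RISC interrupt-control macros visible in the first hunk's context, local_save_flags() and local_irq_disable(), are typically paired in kernel code. The local_irq_restore() used at the end is assumed to be defined alongside them in the same header; the save/disable/restore sequence shown here is the conventional usage pattern, not something introduced by this diff.

static void example_critical_section(void)
{
	unsigned long flags;

	local_save_flags(flags);	/* "ssm 0, %0": read the current PSW into flags */
	local_irq_disable();		/* "rsm PSW_I,%r0": clear the PSW interrupt-enable bit */

	/* ... work that must not be interrupted on this CPU ... */

	local_irq_restore(flags);	/* assumed companion macro: put the saved PSW state back */
}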