]> git.proxmox.com Git - mirror_ubuntu-artful-kernel.git/blame - include/asm-ia64/machvec.h
IRQ: Maintain regs pointer globally rather than passing to IRQ handlers
[mirror_ubuntu-artful-kernel.git] / include / asm-ia64 / machvec.h
CommitLineData
1da177e4
LT
1/*
2 * Machine vector for IA-64.
3 *
4 * Copyright (C) 1999 Silicon Graphics, Inc.
5 * Copyright (C) Srinivasa Thirumalachar <sprasad@engr.sgi.com>
6 * Copyright (C) Vijay Chander <vijay@engr.sgi.com>
7 * Copyright (C) 1999-2001, 2003-2004 Hewlett-Packard Co.
8 * David Mosberger-Tang <davidm@hpl.hp.com>
9 */
10#ifndef _ASM_IA64_MACHVEC_H
11#define _ASM_IA64_MACHVEC_H
12
1da177e4
LT
#include <linux/types.h>

/*
 * Forward declarations: the machine-vector typedefs below only take
 * pointers to these types, so the full definitions are not needed here
 * and their headers are deliberately not included.
 */
struct device;
struct pt_regs;
struct scatterlist;
struct page;
struct mm_struct;
struct pci_bus;
struct task_struct;
struct pci_dev;
1da177e4
LT
24
/*
 * Signatures of the per-platform machine-vector operations.  A platform
 * header provides implementations for these; any operation a platform
 * omits falls back to the defaults #defined near the end of this file.
 */
typedef void ia64_mv_setup_t (char **);
typedef void ia64_mv_cpu_init_t (void);
typedef void ia64_mv_irq_init_t (void);
typedef void ia64_mv_send_ipi_t (int, int, int, int);
typedef void ia64_mv_timer_interrupt_t (int, void *);
typedef void ia64_mv_global_tlb_purge_t (struct mm_struct *, unsigned long, unsigned long, unsigned long);
typedef void ia64_mv_tlb_migrate_finish_t (struct mm_struct *);
typedef unsigned int ia64_mv_local_vector_to_irq (u8);
typedef char *ia64_mv_pci_get_legacy_mem_t (struct pci_bus *);
/*
 * Legacy PCI I/O-space accessors.  NOTE(review): the int return is
 * presumably an error/status code — confirm against the callers.
 */
typedef int ia64_mv_pci_legacy_read_t (struct pci_bus *, u16 port, u32 *val,
				       u8 size);
typedef int ia64_mv_pci_legacy_write_t (struct pci_bus *, u16 port, u32 val,
					u8 size);
typedef void ia64_mv_migrate_t(struct task_struct * task);
1da177e4
LT
39
/* DMA-mapping interface: */
typedef void ia64_mv_dma_init (void);
typedef void *ia64_mv_dma_alloc_coherent (struct device *, size_t, dma_addr_t *, gfp_t);
typedef void ia64_mv_dma_free_coherent (struct device *, size_t, void *, dma_addr_t);
/*
 * NOTE(review): the trailing int on the map/unmap/sync operations is
 * presumably the DMA transfer direction — confirm against the generic
 * dma-mapping API these mirror.
 */
typedef dma_addr_t ia64_mv_dma_map_single (struct device *, void *, size_t, int);
typedef void ia64_mv_dma_unmap_single (struct device *, dma_addr_t, size_t, int);
typedef int ia64_mv_dma_map_sg (struct device *, struct scatterlist *, int, int);
typedef void ia64_mv_dma_unmap_sg (struct device *, struct scatterlist *, int, int);
typedef void ia64_mv_dma_sync_single_for_cpu (struct device *, dma_addr_t, size_t, int);
typedef void ia64_mv_dma_sync_sg_for_cpu (struct device *, struct scatterlist *, int, int);
typedef void ia64_mv_dma_sync_single_for_device (struct device *, dma_addr_t, size_t, int);
typedef void ia64_mv_dma_sync_sg_for_device (struct device *, struct scatterlist *, int, int);
typedef int ia64_mv_dma_mapping_error (dma_addr_t dma_addr);
typedef int ia64_mv_dma_supported (struct device *, u64);
54
/*
 * WARNING: The legacy I/O space is _architected_.  Platforms are
 * expected to follow this architected model (see Section 10.7 in the
 * IA-64 Architecture Software Developer's Manual).  Unfortunately,
 * some broken machines do not follow that model, which is why we have
 * to make the inX/outX operations part of the machine vector.
 * Platform designers should follow the architected model whenever
 * possible.
 */
/* Port I/O accessors (the argument is the port address): */
typedef unsigned int ia64_mv_inb_t (unsigned long);
typedef unsigned int ia64_mv_inw_t (unsigned long);
typedef unsigned int ia64_mv_inl_t (unsigned long);
typedef void ia64_mv_outb_t (unsigned char, unsigned long);
typedef void ia64_mv_outw_t (unsigned short, unsigned long);
typedef void ia64_mv_outl_t (unsigned int, unsigned long);
typedef void ia64_mv_mmiowb_t (void);
/* MMIO read accessors, in normal and _relaxed (weaker-ordering) flavors: */
typedef unsigned char ia64_mv_readb_t (const volatile void __iomem *);
typedef unsigned short ia64_mv_readw_t (const volatile void __iomem *);
typedef unsigned int ia64_mv_readl_t (const volatile void __iomem *);
typedef unsigned long ia64_mv_readq_t (const volatile void __iomem *);
typedef unsigned char ia64_mv_readb_relaxed_t (const volatile void __iomem *);
typedef unsigned short ia64_mv_readw_relaxed_t (const volatile void __iomem *);
typedef unsigned int ia64_mv_readl_relaxed_t (const volatile void __iomem *);
typedef unsigned long ia64_mv_readq_relaxed_t (const volatile void __iomem *);

/* MSI setup/teardown hooks; may be NULL (see the defaults below). */
typedef int ia64_mv_setup_msi_irq_t (unsigned int irq, struct pci_dev *pdev);
typedef void ia64_mv_teardown_msi_irq_t (unsigned int irq);
1da177e4
LT
82
/* Default no-op for machine-vector hooks that take no arguments. */
static inline void
machvec_noop (void)
{
}
87
/* Default no-op for hooks that take a struct mm_struct argument. */
static inline void
machvec_noop_mm (struct mm_struct *mm)
{
}
92
e08e6c52
BC
/* Default no-op for hooks that take a struct task_struct argument. */
static inline void
machvec_noop_task (struct task_struct *task)
{
}
97
/*
 * Shared helper implementations used as defaults by several platforms
 * (presumably defined in the common machvec support code — verify).
 */
extern void machvec_setup (char **);
extern void machvec_timer_interrupt (int, void *);
extern void machvec_dma_sync_single (struct device *, dma_addr_t, size_t, int);
extern void machvec_dma_sync_sg (struct device *, struct scatterlist *, int, int);
extern void machvec_tlb_migrate_finish (struct mm_struct *);
103
/*
 * Bind the machine vector.  A kernel configured for one specific
 * platform includes that platform's header, which #defines each
 * platform_* operation at compile time.  A CONFIG_IA64_GENERIC kernel
 * instead routes every operation through the global ia64_mv structure
 * so the platform can be chosen at runtime.
 */
# if defined (CONFIG_IA64_HP_SIM)
# include <asm/machvec_hpsim.h>
# elif defined (CONFIG_IA64_DIG)
# include <asm/machvec_dig.h>
# elif defined (CONFIG_IA64_HP_ZX1)
# include <asm/machvec_hpzx1.h>
# elif defined (CONFIG_IA64_HP_ZX1_SWIOTLB)
# include <asm/machvec_hpzx1_swiotlb.h>
# elif defined (CONFIG_IA64_SGI_SN2)
# include <asm/machvec_sn2.h>
# elif defined (CONFIG_IA64_GENERIC)

/*
 * MACHVEC_PLATFORM_HEADER is presumably set by per-platform
 * translation units that want their own bindings even in a generic
 * build — confirm against the machvec build files.  Otherwise, each
 * operation dispatches indirectly through ia64_mv.
 */
# ifdef MACHVEC_PLATFORM_HEADER
# include MACHVEC_PLATFORM_HEADER
# else
# define platform_name		ia64_mv.name
# define platform_setup		ia64_mv.setup
# define platform_cpu_init	ia64_mv.cpu_init
# define platform_irq_init	ia64_mv.irq_init
# define platform_send_ipi	ia64_mv.send_ipi
# define platform_timer_interrupt	ia64_mv.timer_interrupt
# define platform_global_tlb_purge	ia64_mv.global_tlb_purge
# define platform_tlb_migrate_finish	ia64_mv.tlb_migrate_finish
# define platform_dma_init		ia64_mv.dma_init
# define platform_dma_alloc_coherent	ia64_mv.dma_alloc_coherent
# define platform_dma_free_coherent	ia64_mv.dma_free_coherent
# define platform_dma_map_single	ia64_mv.dma_map_single
# define platform_dma_unmap_single	ia64_mv.dma_unmap_single
# define platform_dma_map_sg		ia64_mv.dma_map_sg
# define platform_dma_unmap_sg	ia64_mv.dma_unmap_sg
# define platform_dma_sync_single_for_cpu	ia64_mv.dma_sync_single_for_cpu
# define platform_dma_sync_sg_for_cpu	ia64_mv.dma_sync_sg_for_cpu
# define platform_dma_sync_single_for_device	ia64_mv.dma_sync_single_for_device
# define platform_dma_sync_sg_for_device	ia64_mv.dma_sync_sg_for_device
# define platform_dma_mapping_error	ia64_mv.dma_mapping_error
# define platform_dma_supported	ia64_mv.dma_supported
# define platform_local_vector_to_irq	ia64_mv.local_vector_to_irq
# define platform_pci_get_legacy_mem	ia64_mv.pci_get_legacy_mem
# define platform_pci_legacy_read	ia64_mv.pci_legacy_read
# define platform_pci_legacy_write	ia64_mv.pci_legacy_write
# define platform_inb		ia64_mv.inb
# define platform_inw		ia64_mv.inw
# define platform_inl		ia64_mv.inl
# define platform_outb		ia64_mv.outb
# define platform_outw		ia64_mv.outw
# define platform_outl		ia64_mv.outl
# define platform_mmiowb	ia64_mv.mmiowb
# define platform_readb		ia64_mv.readb
# define platform_readw		ia64_mv.readw
# define platform_readl		ia64_mv.readl
# define platform_readq		ia64_mv.readq
# define platform_readb_relaxed	ia64_mv.readb_relaxed
# define platform_readw_relaxed	ia64_mv.readw_relaxed
# define platform_readl_relaxed	ia64_mv.readl_relaxed
# define platform_readq_relaxed	ia64_mv.readq_relaxed
# define platform_migrate	ia64_mv.migrate
# define platform_setup_msi_irq	ia64_mv.setup_msi_irq
# define platform_teardown_msi_irq	ia64_mv.teardown_msi_irq
# endif
163
/* __attribute__((__aligned__(16))) is required to make size of the
 * structure multiple of 16 bytes.
 * This will fillup the holes created because of section 3.3.1 in
 * Software Conventions guide.
 */
/*
 * The machine vector: one function pointer per platform-specific
 * operation.  NOTE: field order is load-bearing — MACHVEC_INIT below
 * is a positional initializer, so any field added or moved here must
 * be mirrored there.
 */
struct ia64_machine_vector {
	const char *name;
	ia64_mv_setup_t *setup;
	ia64_mv_cpu_init_t *cpu_init;
	ia64_mv_irq_init_t *irq_init;
	ia64_mv_send_ipi_t *send_ipi;
	ia64_mv_timer_interrupt_t *timer_interrupt;
	ia64_mv_global_tlb_purge_t *global_tlb_purge;
	ia64_mv_tlb_migrate_finish_t *tlb_migrate_finish;
	/* DMA-mapping operations: */
	ia64_mv_dma_init *dma_init;
	ia64_mv_dma_alloc_coherent *dma_alloc_coherent;
	ia64_mv_dma_free_coherent *dma_free_coherent;
	ia64_mv_dma_map_single *dma_map_single;
	ia64_mv_dma_unmap_single *dma_unmap_single;
	ia64_mv_dma_map_sg *dma_map_sg;
	ia64_mv_dma_unmap_sg *dma_unmap_sg;
	ia64_mv_dma_sync_single_for_cpu *dma_sync_single_for_cpu;
	ia64_mv_dma_sync_sg_for_cpu *dma_sync_sg_for_cpu;
	ia64_mv_dma_sync_single_for_device *dma_sync_single_for_device;
	ia64_mv_dma_sync_sg_for_device *dma_sync_sg_for_device;
	ia64_mv_dma_mapping_error *dma_mapping_error;
	ia64_mv_dma_supported *dma_supported;
	/* IRQ and legacy-PCI operations: */
	ia64_mv_local_vector_to_irq *local_vector_to_irq;
	ia64_mv_pci_get_legacy_mem_t *pci_get_legacy_mem;
	ia64_mv_pci_legacy_read_t *pci_legacy_read;
	ia64_mv_pci_legacy_write_t *pci_legacy_write;
	/* Port-I/O and MMIO accessors: */
	ia64_mv_inb_t *inb;
	ia64_mv_inw_t *inw;
	ia64_mv_inl_t *inl;
	ia64_mv_outb_t *outb;
	ia64_mv_outw_t *outw;
	ia64_mv_outl_t *outl;
	ia64_mv_mmiowb_t *mmiowb;
	ia64_mv_readb_t *readb;
	ia64_mv_readw_t *readw;
	ia64_mv_readl_t *readl;
	ia64_mv_readq_t *readq;
	ia64_mv_readb_relaxed_t *readb_relaxed;
	ia64_mv_readw_relaxed_t *readw_relaxed;
	ia64_mv_readl_relaxed_t *readl_relaxed;
	ia64_mv_readq_relaxed_t *readq_relaxed;
	ia64_mv_migrate_t *migrate;
	/* MSI hooks; may be NULL (see the defaults below). */
	ia64_mv_setup_msi_irq_t *setup_msi_irq;
	ia64_mv_teardown_msi_irq_t *teardown_msi_irq;
} __attribute__((__aligned__(16))); /* align attrib? see above comment */
214
/*
 * Build a compile-time initializer for struct ia64_machine_vector from
 * the platform_* macros currently in scope (either a platform header's
 * bindings or the #ifndef defaults below).  This is a POSITIONAL
 * initializer: entries must stay in exactly the same order as the
 * struct's fields.
 */
#define MACHVEC_INIT(name)			\
{						\
	#name,					\
	platform_setup,				\
	platform_cpu_init,			\
	platform_irq_init,			\
	platform_send_ipi,			\
	platform_timer_interrupt,		\
	platform_global_tlb_purge,		\
	platform_tlb_migrate_finish,		\
	platform_dma_init,			\
	platform_dma_alloc_coherent,		\
	platform_dma_free_coherent,		\
	platform_dma_map_single,		\
	platform_dma_unmap_single,		\
	platform_dma_map_sg,			\
	platform_dma_unmap_sg,			\
	platform_dma_sync_single_for_cpu,	\
	platform_dma_sync_sg_for_cpu,		\
	platform_dma_sync_single_for_device,	\
	platform_dma_sync_sg_for_device,	\
	platform_dma_mapping_error,		\
	platform_dma_supported,			\
	platform_local_vector_to_irq,		\
	platform_pci_get_legacy_mem,		\
	platform_pci_legacy_read,		\
	platform_pci_legacy_write,		\
	platform_inb,				\
	platform_inw,				\
	platform_inl,				\
	platform_outb,				\
	platform_outw,				\
	platform_outl,				\
	platform_mmiowb,			\
	platform_readb,				\
	platform_readw,				\
	platform_readl,				\
	platform_readq,				\
	platform_readb_relaxed,			\
	platform_readw_relaxed,			\
	platform_readl_relaxed,			\
	platform_readq_relaxed,			\
	platform_migrate,			\
	platform_setup_msi_irq,			\
	platform_teardown_msi_irq,		\
}
261
/* The global machine vector used by CONFIG_IA64_GENERIC kernels. */
extern struct ia64_machine_vector ia64_mv;
/* Look up the vector matching @name and install it into ia64_mv. */
extern void machvec_init (const char *name);

# else
# error Unknown configuration.  Update asm-ia64/machvec.h.
# endif /* CONFIG_IA64_GENERIC */
268
/*
 * Declare default routines which aren't declared anywhere else:
 * the swiotlb implementations serve as the fallback DMA operations
 * (see the platform_dma_* defaults below).
 */
extern ia64_mv_dma_init swiotlb_init;
extern ia64_mv_dma_alloc_coherent swiotlb_alloc_coherent;
extern ia64_mv_dma_free_coherent swiotlb_free_coherent;
extern ia64_mv_dma_map_single swiotlb_map_single;
extern ia64_mv_dma_unmap_single swiotlb_unmap_single;
extern ia64_mv_dma_map_sg swiotlb_map_sg;
extern ia64_mv_dma_unmap_sg swiotlb_unmap_sg;
extern ia64_mv_dma_sync_single_for_cpu swiotlb_sync_single_for_cpu;
extern ia64_mv_dma_sync_sg_for_cpu swiotlb_sync_sg_for_cpu;
extern ia64_mv_dma_sync_single_for_device swiotlb_sync_single_for_device;
extern ia64_mv_dma_sync_sg_for_device swiotlb_sync_sg_for_device;
extern ia64_mv_dma_mapping_error swiotlb_dma_mapping_error;
extern ia64_mv_dma_supported swiotlb_dma_supported;
285
/*
 * Define default versions so we can extend machvec for new platforms without having
 * to update the machvec files for all existing platforms.
 */
/* Setup/init hooks default to the shared helpers or to no-ops: */
#ifndef platform_setup
# define platform_setup			machvec_setup
#endif
#ifndef platform_cpu_init
# define platform_cpu_init		machvec_noop
#endif
#ifndef platform_irq_init
# define platform_irq_init		machvec_noop
#endif

#ifndef platform_send_ipi
# define platform_send_ipi		ia64_send_ipi	/* default to architected version */
#endif
#ifndef platform_timer_interrupt
# define platform_timer_interrupt 	machvec_timer_interrupt
#endif
#ifndef platform_global_tlb_purge
# define platform_global_tlb_purge	ia64_global_tlb_purge /* default to architected version */
#endif
#ifndef platform_tlb_migrate_finish
# define platform_tlb_migrate_finish	machvec_noop_mm
#endif
/* DMA operations default to the swiotlb implementations declared above: */
#ifndef platform_dma_init
# define platform_dma_init		swiotlb_init
#endif
#ifndef platform_dma_alloc_coherent
# define platform_dma_alloc_coherent	swiotlb_alloc_coherent
#endif
#ifndef platform_dma_free_coherent
# define platform_dma_free_coherent	swiotlb_free_coherent
#endif
#ifndef platform_dma_map_single
# define platform_dma_map_single	swiotlb_map_single
#endif
#ifndef platform_dma_unmap_single
# define platform_dma_unmap_single	swiotlb_unmap_single
#endif
#ifndef platform_dma_map_sg
# define platform_dma_map_sg		swiotlb_map_sg
#endif
#ifndef platform_dma_unmap_sg
# define platform_dma_unmap_sg		swiotlb_unmap_sg
#endif
#ifndef platform_dma_sync_single_for_cpu
# define platform_dma_sync_single_for_cpu	swiotlb_sync_single_for_cpu
#endif
#ifndef platform_dma_sync_sg_for_cpu
# define platform_dma_sync_sg_for_cpu		swiotlb_sync_sg_for_cpu
#endif
#ifndef platform_dma_sync_single_for_device
# define platform_dma_sync_single_for_device	swiotlb_sync_single_for_device
#endif
#ifndef platform_dma_sync_sg_for_device
# define platform_dma_sync_sg_for_device	swiotlb_sync_sg_for_device
#endif
#ifndef platform_dma_mapping_error
# define platform_dma_mapping_error		swiotlb_dma_mapping_error
#endif
#ifndef platform_dma_supported
# define platform_dma_supported		swiotlb_dma_supported
#endif
/* IRQ and legacy-PCI operations default to the architected versions: */
#ifndef platform_local_vector_to_irq
# define platform_local_vector_to_irq	__ia64_local_vector_to_irq
#endif
#ifndef platform_pci_get_legacy_mem
# define platform_pci_get_legacy_mem	ia64_pci_get_legacy_mem
#endif
#ifndef platform_pci_legacy_read
# define platform_pci_legacy_read	ia64_pci_legacy_read
extern int ia64_pci_legacy_read(struct pci_bus *bus, u16 port, u32 *val, u8 size);
#endif
#ifndef platform_pci_legacy_write
# define platform_pci_legacy_write	ia64_pci_legacy_write
extern int ia64_pci_legacy_write(struct pci_bus *bus, u16 port, u32 val, u8 size);
#endif
/* Port-I/O and MMIO accessors default to the architected __ia64_* versions: */
#ifndef platform_inb
# define platform_inb		__ia64_inb
#endif
#ifndef platform_inw
# define platform_inw		__ia64_inw
#endif
#ifndef platform_inl
# define platform_inl		__ia64_inl
#endif
#ifndef platform_outb
# define platform_outb		__ia64_outb
#endif
#ifndef platform_outw
# define platform_outw		__ia64_outw
#endif
#ifndef platform_outl
# define platform_outl		__ia64_outl
#endif
#ifndef platform_mmiowb
# define platform_mmiowb	__ia64_mmiowb
#endif
#ifndef platform_readb
# define platform_readb		__ia64_readb
#endif
#ifndef platform_readw
# define platform_readw		__ia64_readw
#endif
#ifndef platform_readl
# define platform_readl		__ia64_readl
#endif
#ifndef platform_readq
# define platform_readq		__ia64_readq
#endif
#ifndef platform_readb_relaxed
# define platform_readb_relaxed	__ia64_readb_relaxed
#endif
#ifndef platform_readw_relaxed
# define platform_readw_relaxed	__ia64_readw_relaxed
#endif
#ifndef platform_readl_relaxed
# define platform_readl_relaxed	__ia64_readl_relaxed
#endif
#ifndef platform_readq_relaxed
# define platform_readq_relaxed	__ia64_readq_relaxed
#endif
#ifndef platform_migrate
# define platform_migrate		machvec_noop_task
#endif
/*
 * The MSI hooks default to NULL function pointers, so users of
 * platform_setup_msi_irq/platform_teardown_msi_irq must check for
 * NULL before calling through them.
 */
#ifndef platform_setup_msi_irq
# define platform_setup_msi_irq		((ia64_mv_setup_msi_irq_t*)NULL)
#endif
#ifndef platform_teardown_msi_irq
# define platform_teardown_msi_irq	((ia64_mv_teardown_msi_irq_t*)NULL)
#endif

#endif /* _ASM_IA64_MACHVEC_H */