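/*
 * include/linux/vserver/history.h
 *
 * Optional debug history for Linux-VServer vx_info handling.  When
 * CONFIG_VSERVER_HISTORY is enabled, each get/put/hash/lookup/...
 * operation on a vx_info is recorded as a _vx_hist_entry together
 * with its call site, and the collected trail can later be inspected
 * via vxh_dump_history().  Without the option, all helpers below
 * compile away to no-ops.
 *
 * Note: the full struct vx_info definition (vx_id, vx_usecnt,
 * vx_tasks) is expected to be in scope wherever this header is used.
 */
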
#ifndef _VX_HISTORY_H
#define _VX_HISTORY_H


enum {
        VXH_UNUSED=0,
        VXH_THROW_OOPS=1,

        VXH_GET_VX_INFO,
        VXH_PUT_VX_INFO,
        VXH_INIT_VX_INFO,
        VXH_SET_VX_INFO,
        VXH_CLR_VX_INFO,
        VXH_CLAIM_VX_INFO,
        VXH_RELEASE_VX_INFO,
        VXH_ALLOC_VX_INFO,
        VXH_DEALLOC_VX_INFO,
        VXH_HASH_VX_INFO,
        VXH_UNHASH_VX_INFO,
        VXH_LOC_VX_INFO,
        VXH_LOOKUP_VX_INFO,
        VXH_CREATE_VX_INFO,
};

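/*
 * Per-event payload of a history entry: a snapshot of the vx_info
 * state (pointer, xid, use and task counts) plus, depending on the
 * event type, either a data pointer or a numeric argument.
 */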
struct _vxhe_vxi {
        struct vx_info *ptr;
        unsigned xid;
        unsigned usecnt;
        unsigned tasks;
};

struct _vxhe_set_clr {
        void *data;
};

struct _vxhe_loc_lookup {
        unsigned arg;
};

struct _vx_hist_entry {
        void *loc;
        unsigned short seq;
        unsigned short type;
        struct _vxhe_vxi vxi;
        union {
                struct _vxhe_set_clr sc;
                struct _vxhe_loc_lookup ll;
        };
};

#ifdef  CONFIG_VSERVER_HISTORY

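/*
 * vxh_active gates recording (it is cleared once an oops has been
 * thrown, see __vxh_throw_oops() below); vxh_advance() returns the
 * next history entry to be filled in for the given call site.
 */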
extern volatile unsigned int vxh_active;

struct _vx_hist_entry *vxh_advance(void *loc);


static inline
void    __vxh_copy_vxi(struct _vx_hist_entry *entry, struct vx_info *vxi)
{
        entry->vxi.ptr = vxi;
        if (vxi) {
                entry->vxi.usecnt = atomic_read(&vxi->vx_usecnt);
                entry->vxi.tasks = atomic_read(&vxi->vx_tasks);
                entry->vxi.xid = vxi->vx_id;
        }
}


#define __HERE__ current_text_addr()

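/*
 * Common body of the recording helpers below: with preemption
 * disabled, grab the next history entry for this call site, fill in
 * the event-specific payload (__data) and stamp the event type.
 */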
#define __VXH_BODY(__type, __data, __here)      \
        struct _vx_hist_entry *entry;           \
                                                \
        preempt_disable();                      \
        entry = vxh_advance(__here);            \
        __data;                                 \
        entry->type = __type;                   \
        preempt_enable();


        /* pass vxi only */

#define __VXH_SMPL                              \
        __vxh_copy_vxi(entry, vxi)

static inline
void    __vxh_smpl(struct vx_info *vxi, int __type, void *__here)
{
        __VXH_BODY(__type, __VXH_SMPL, __here)
}

        /* pass vxi and data (void *) */

#define __VXH_DATA                              \
        __vxh_copy_vxi(entry, vxi);             \
        entry->sc.data = data

static inline
void    __vxh_data(struct vx_info *vxi, void *data,
                        int __type, void *__here)
{
        __VXH_BODY(__type, __VXH_DATA, __here)
}

        /* pass vxi and arg (long) */

#define __VXH_LONG                              \
        __vxh_copy_vxi(entry, vxi);             \
        entry->ll.arg = arg

static inline
void    __vxh_long(struct vx_info *vxi, long arg,
                        int __type, void *__here)
{
        __VXH_BODY(__type, __VXH_LONG, __here)
}


static inline
void    __vxh_throw_oops(void *__here)
{
        __VXH_BODY(VXH_THROW_OOPS, {}, __here);
        /* prevent further acquisition */
        vxh_active = 0;
}


#define vxh_throw_oops()        __vxh_throw_oops(__HERE__);

#define __vxh_get_vx_info(v,h)  __vxh_smpl(v, VXH_GET_VX_INFO, h);
#define __vxh_put_vx_info(v,h)  __vxh_smpl(v, VXH_PUT_VX_INFO, h);
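
/*
 * Illustrative sketch only (not part of this header; name and
 * signature are made up): the per-event wrappers are intended to be
 * called from the vx_info refcounting helpers with __HERE__ as the
 * call-site marker, roughly like
 *
 *      static inline struct vx_info *__get_vx_info(struct vx_info *vxi,
 *                                                  void *here)
 *      {
 *              if (vxi) {
 *                      __vxh_get_vx_info(vxi, here);
 *                      atomic_inc(&vxi->vx_usecnt);
 *              }
 *              return vxi;
 *      }
 */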

#define __vxh_init_vx_info(v,d,h) \
        __vxh_data(v,d, VXH_INIT_VX_INFO, h);
#define __vxh_set_vx_info(v,d,h) \
        __vxh_data(v,d, VXH_SET_VX_INFO, h);
#define __vxh_clr_vx_info(v,d,h) \
        __vxh_data(v,d, VXH_CLR_VX_INFO, h);

#define __vxh_claim_vx_info(v,d,h) \
        __vxh_data(v,d, VXH_CLAIM_VX_INFO, h);
#define __vxh_release_vx_info(v,d,h) \
        __vxh_data(v,d, VXH_RELEASE_VX_INFO, h);

#define vxh_alloc_vx_info(v) \
        __vxh_smpl(v, VXH_ALLOC_VX_INFO, __HERE__);
#define vxh_dealloc_vx_info(v) \
        __vxh_smpl(v, VXH_DEALLOC_VX_INFO, __HERE__);

#define vxh_hash_vx_info(v) \
        __vxh_smpl(v, VXH_HASH_VX_INFO, __HERE__);
#define vxh_unhash_vx_info(v) \
        __vxh_smpl(v, VXH_UNHASH_VX_INFO, __HERE__);

#define vxh_loc_vx_info(v,l) \
        __vxh_long(v,l, VXH_LOC_VX_INFO, __HERE__);
#define vxh_lookup_vx_info(v,l) \
        __vxh_long(v,l, VXH_LOOKUP_VX_INFO, __HERE__);
#define vxh_create_vx_info(v,l) \
        __vxh_long(v,l, VXH_CREATE_VX_INFO, __HERE__);

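/* dump the recorded history entries (e.g. once an oops has been thrown) */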
extern void vxh_dump_history(void);


#else  /* CONFIG_VSERVER_HISTORY */

#define __HERE__        0

#define vxh_throw_oops()                do { } while (0)

#define __vxh_get_vx_info(v,h)          do { } while (0)
#define __vxh_put_vx_info(v,h)          do { } while (0)

#define __vxh_init_vx_info(v,d,h)       do { } while (0)
#define __vxh_set_vx_info(v,d,h)        do { } while (0)
#define __vxh_clr_vx_info(v,d,h)        do { } while (0)

#define __vxh_claim_vx_info(v,d,h)      do { } while (0)
#define __vxh_release_vx_info(v,d,h)    do { } while (0)

#define vxh_alloc_vx_info(v)            do { } while (0)
#define vxh_dealloc_vx_info(v)          do { } while (0)

#define vxh_hash_vx_info(v)             do { } while (0)
#define vxh_unhash_vx_info(v)           do { } while (0)

#define vxh_loc_vx_info(v,l)            do { } while (0)
#define vxh_lookup_vx_info(v,l)         do { } while (0)
#define vxh_create_vx_info(v,l)         do { } while (0)

#define vxh_dump_history()              do { } while (0)


#endif /* CONFIG_VSERVER_HISTORY */

#endif /* _VX_HISTORY_H */