5 #warning config options missing
/* Per-subsystem debug predicates.  Each consults the runtime control
 * variable vx_debug_<name> (extern when CONFIG_VSERVER_DEBUG is set,
 * pinned to 0 otherwise, so the guarded code folds away).
 *
 *  VXD_CBIT(name, bit):  true when debug bit 'bit' is enabled
 *  VXD_CMIN(name, lvl):  true when the debug level exceeds 'lvl'
 *  VXD_MASK(name, mask): true when any bit of 'mask' is enabled
 *
 * (Fix: removed stray leading line-number tokens left by extraction,
 * which made these lines uncompilable.) */
#define VXD_CBIT(n, m)	(vx_debug_ ## n & (1 << (m)))
#define VXD_CMIN(n, m)	(vx_debug_ ## n > (m))
#define VXD_MASK(n, m)	(vx_debug_ ## n & (m))
/* Byte extraction for debug output.
 *  VXD_QPOS(v, p): byte p (0 = least significant) of 32-bit value v.
 *  VXD_QUAD(v):    all four bytes, LSB first — suitable as arguments
 *                  to a "%u.%u.%u.%u" style format (presumably IPv4
 *                  addresses — confirm at call sites).
 * (Fix: removed stray leading line-number tokens left by extraction.) */
#define VXD_QPOS(v, p)	(((uint32_t)(v) >> ((p) * 8)) & 0xFF)
#define VXD_QUAD(v)	VXD_QPOS(v, 0), VXD_QPOS(v, 1), \
			VXD_QPOS(v, 2), VXD_QPOS(v, 3)
/* Alias for the standard C99 __func__ identifier (current function name).
 * NOTE(review): double-underscore identifiers are reserved for the
 * implementation; retained as-is because existing call sites may use it. */
16 #define __FUNC__ __func__
19 #ifdef CONFIG_VSERVER_DEBUG
/* Runtime debug controls, one per subsystem; the definitions live in a
 * .c file elsewhere.  Consulted by the VXD_* predicates above.
 * (Fix: removed stray leading line-number tokens left by extraction.) */
extern unsigned int vx_debug_switch;
extern unsigned int vx_debug_xid;
extern unsigned int vx_debug_nid;
extern unsigned int vx_debug_net;
extern unsigned int vx_debug_limit;
extern unsigned int vx_debug_dlim;
extern unsigned int vx_debug_cvirt;

/* printk prefixes: plain "vxD: " for debug lines, KERN_WARNING-tagged
 * "vxW: " for warnings. */
#define VX_LOGLEVEL	"vxD: "
#define VX_WARNLEVEL	KERN_WARNING "vxW: "
/* vxdprintk(c, f, ...): print a "vxD: "-prefixed debug message when
 * condition c is true.  (The do/if wrapper lines are elided in this
 * excerpt — note the gap in the embedded line numbering.) */
33 #define vxdprintk(c,f,x...) \
36 printk(VX_LOGLEVEL f "\n" , ##x); \
/* vxlprintk: as vxdprintk, but the format appends " @%s:%d" — the
 * trailing varargs presumably carry __FILE__/__LINE__; confirm at call
 * sites (wrapper lines elided). */
39 #define vxlprintk(c,f,x...) \
42 printk(VX_LOGLEVEL f " @%s:%d\n", x); \
/* vxfprintk: as vxlprintk with an extra leading %s (function name). */
45 #define vxfprintk(c,f,x...) \
48 printk(VX_LOGLEVEL f " %s@%s:%d\n", x); \
/* vxwprintk: conditional warning with the KERN_WARNING "vxW: " prefix. */
52 #define vxwprintk(c,f,x...) \
55 printk(VX_WARNLEVEL f "\n" , ##x); \
/* vxd_path(d, m): format the path of dentry d / vfsmount m via d_path().
 * NOTE(review): the result lives in a function-local static buffer, so
 * the macro is neither reentrant nor SMP-safe and successive calls
 * overwrite each other — tolerable only for debug printing.
 * Uses a GNU C statement expression so it yields d_path()'s return value.
 * (Fix: removed stray leading line-number tokens left by extraction.) */
#define vxd_path(d, m)						\
	({ static char _buffer[PATH_MAX];			\
	   d_path((d), (m), _buffer, sizeof(_buffer)); })
63 #else /* CONFIG_VSERVER_DEBUG */
/* Debugging disabled: pin every control to 0 so the VXD_* predicates
 * become constant-false and the compiler drops the guarded code.
 * (Fix: removed stray leading line-number tokens left by extraction.) */
#define vx_debug_switch	0
#define vx_debug_xid	0
#define vx_debug_nid	0
#define vx_debug_net	0
#define vx_debug_limit	0
#define vx_debug_dlim	0
#define vx_debug_cvirt	0

/* The printk helpers compile away to empty statements. */
#define vxdprintk(x...)	do { } while (0)
#define vxlprintk(x...)	do { } while (0)
#define vxfprintk(x...)	do { } while (0)
#define vxwprintk(x...)	do { } while (0)
/* Debug-off replacement for vxd_path(d, m): yield a placeholder string.
 * Fix: made function-like so call sites written against the debug
 * variant (which takes (d, m)) still compile — the previous object-like
 * form would expand to "<none>"(d, m) and break the build. */
#define vxd_path(d, m)	"<none>"
80 #endif /* CONFIG_VSERVER_DEBUG */
85 #ifdef CONFIG_VSERVER_HISTORY
/* Global flag gating history recording; presumably cleared when an oops
 * is recorded (see vxh_throw_oops below) — confirm, lines elided. */
88 extern unsigned volatile int vxh_active;
/* Event payload for set/clr-style records (body elided in this excerpt). */
97 struct _vxhe_set_clr {
/* Event payload for loc/lookup-style records (body elided). */
101 struct _vxhe_loc_lookup {
/* One history record: a vx_info snapshot plus per-event payload.
 * Members between the visible lines are elided in this excerpt. */
125 struct _vx_hist_entry {
129 struct _vxhe_vxi vxi; /* snapshot filled by __vxh_copy_vxi() */
131 struct _vxhe_set_clr sc; /* presumably selected by entry->type — confirm */
132 struct _vxhe_loc_lookup ll; /* presumably selected by entry->type — confirm */
/* Claim and return the next history slot; loc records the call site. */
136 struct _vx_hist_entry *vxh_advance(void *loc);
/* Record the identifying fields of vxi into a history entry: the pointer
 * itself, the current use/task reference counts, and the context id.
 * (Function braces are elided in this excerpt.) */
149 static inline void __vxh_copy_vxi(struct _vx_hist_entry *entry, struct vx_info *vxi)
151 entry->vxi.ptr = vxi;
153 entry->vxi.usecnt = atomic_read(&vxi->vx_usecnt);
154 entry->vxi.tasks = atomic_read(&vxi->vx_tasks);
155 entry->vxi.xid = vxi->vx_id;
/* __VXH_BODY(type, data): shared skeleton of every recorder — obtain the
 * next slot via vxh_advance(), stamp the event type, then run the
 * event-specific data statement.  (Some wrapper lines are elided.) */
160 #define __VXH_BODY(__type, __data) \
161 struct _vx_hist_entry *entry; \
164 entry = vxh_advance(VXH_HERE()); \
166 entry->type = __type; \
/* Data statement for events that record only the vx_info snapshot. */
171 #define __VXH_SIMPLE \
172 __vxh_copy_vxi(entry, vxi)
/* VXH_SIMPLE(name, type): emit an inline recorder taking just vxi. */
174 #define VXH_SIMPLE(__name, __type) \
175 static inline void __name(struct vx_info *vxi) \
177 __VXH_BODY(__type, __VXH_SIMPLE) \
180 /* pass vxi and data (void *) */
/* Continuation of the __VXH_DATA data statement (its "#define" line is
 * elided in this excerpt): snapshot plus the opaque data pointer. */
182 __vxh_copy_vxi(entry, vxi); \
183 entry->sc.data = data
/* VXH_DATA(name, type): recorder taking (vxi, void *data). */
185 #define VXH_DATA(__name, __type) \
187 void __name(struct vx_info *vxi, void *data) \
189 __VXH_BODY(__type, __VXH_DATA) \
192 /* pass vxi and arg (long) */
/* __VXH_LARG data statement (its "#define" line and the arg assignment
 * are elided in this excerpt). */
194 __vxh_copy_vxi(entry, vxi); \
/* VXH_LARG(name, type): recorder taking (vxi, long arg). */
197 #define VXH_LARG(__name, __type) \
199 void __name(struct vx_info *vxi, long arg) \
201 __VXH_BODY(__type, __VXH_LARG) \
/* Record an oops event, then stop further history acquisition. */
205 static inline void vxh_throw_oops(void)
207 __VXH_BODY(VXH_THROW_OOPS, {});
208 /* prevent further acquisition */
/* Reference-count events. */
212 VXH_SIMPLE(vxh_get_vx_info, VXH_GET_VX_INFO);
213 VXH_SIMPLE(vxh_put_vx_info, VXH_PUT_VX_INFO);
/* Assignment events carrying an opaque data pointer. */
215 VXH_DATA(vxh_init_vx_info, VXH_INIT_VX_INFO);
216 VXH_DATA(vxh_set_vx_info, VXH_SET_VX_INFO);
217 VXH_DATA(vxh_clr_vx_info, VXH_CLR_VX_INFO);
/* Claim/release events carrying an opaque data pointer. */
219 VXH_DATA(vxh_claim_vx_info, VXH_CLAIM_VX_INFO);
220 VXH_DATA(vxh_release_vx_info, VXH_RELEASE_VX_INFO);
/* Allocation lifecycle events. */
222 VXH_SIMPLE(vxh_alloc_vx_info, VXH_ALLOC_VX_INFO);
223 VXH_SIMPLE(vxh_dealloc_vx_info, VXH_DEALLOC_VX_INFO);
/* Hash-table lifecycle events. */
225 VXH_SIMPLE(vxh_hash_vx_info, VXH_HASH_VX_INFO);
226 VXH_SIMPLE(vxh_unhash_vx_info, VXH_UNHASH_VX_INFO);
/* Lookup/creation events carrying a long argument. */
228 VXH_LARG(vxh_loc_vx_info, VXH_LOC_VX_INFO);
229 VXH_LARG(vxh_lookup_vx_info, VXH_LOOKUP_VX_INFO);
230 VXH_LARG(vxh_create_vx_info, VXH_CREATE_VX_INFO);
/* Dump the recorded history (implementation elsewhere). */
232 extern void vxh_dump_history(void);
235 #else /* CONFIG_VSERVER_HISTORY */
/* CONFIG_VSERVER_HISTORY disabled: every recorder becomes a no-op with
 * the same argument count as its real counterpart (v = vxi, d = data,
 * a = arg).  NOTE(review): the (a, v) order on the loc/lookup/create
 * stubs looks reversed relative to the real (vxi, arg) signature —
 * harmless for no-ops, but worth confirming against call sites.
 * (Fix: removed stray leading line-number tokens left by extraction.) */
#define vxh_throw_oops()		do { } while (0)

#define vxh_get_vx_info(v)		do { } while (0)
#define vxh_put_vx_info(v)		do { } while (0)

#define vxh_init_vx_info(v,d)		do { } while (0)
#define vxh_set_vx_info(v,d)		do { } while (0)
#define vxh_clr_vx_info(v,d)		do { } while (0)

#define vxh_claim_vx_info(v,d)		do { } while (0)
#define vxh_release_vx_info(v,d)	do { } while (0)

#define vxh_alloc_vx_info(v)		do { } while (0)
#define vxh_dealloc_vx_info(v)		do { } while (0)

#define vxh_hash_vx_info(v)		do { } while (0)
#define vxh_unhash_vx_info(v)		do { } while (0)

#define vxh_loc_vx_info(a,v)		do { } while (0)
#define vxh_lookup_vx_info(a,v)		do { } while (0)
#define vxh_create_vx_info(a,v)		do { } while (0)

#define vxh_dump_history()		do { } while (0)
263 #endif /* CONFIG_VSERVER_HISTORY */
266 #ifdef CONFIG_VSERVER_DEBUG
/* Debug build: verify that spinlock l is actually held at this point. */
267 #define vxd_assert_lock(l) assert_spin_locked(l)
/* Non-debug stub (the intervening #else line is elided in this excerpt). */
269 #define vxd_assert_lock(l) do { } while (0)
273 #endif /* _VX_DEBUG_H */