5 #warning config options missing
/*
 * Debug-flag test macros. Each pastes its first argument onto the
 * vx_debug_ prefix, so VXD_CBIT(xid, 3) tests vx_debug_xid:
 *   VXD_CBIT(n,m) - true when bit m of vx_debug_<n> is set
 *   VXD_CMIN(n,m) - true when vx_debug_<n> is greater than m
 *   VXD_MASK(n,m) - true when vx_debug_<n> has any bit of mask m set
 */
8 #define VXD_CBIT(n,m) (vx_debug_ ## n & (1 << (m)))
9 #define VXD_CMIN(n,m) (vx_debug_ ## n > (m))
10 #define VXD_MASK(n,m) (vx_debug_ ## n & (m))
/* VXD_QPOS(v,p): extract byte p (0..3) of the 32-bit value v */
12 #define VXD_QPOS(v,p) (((uint32_t)(v) >> ((p)*8)) & 0xFF)
/* VXD_QUAD(v): expand v into four comma-separated byte values,
 * least-significant byte first -- intended as printf arguments */
13 #define VXD_QUAD(v) VXD_QPOS(v,0), VXD_QPOS(v,1), \
14 VXD_QPOS(v,2), VXD_QPOS(v,3)
/* alias for the C99 predefined identifier __func__ */
16 #define __FUNC__ __func__
19 #ifdef CONFIG_VSERVER_DEBUG
/* per-subsystem debug levels/masks; declared here, defined elsewhere */
21 extern unsigned int vx_debug_switch;
22 extern unsigned int vx_debug_xid;
23 extern unsigned int vx_debug_nid;
24 extern unsigned int vx_debug_net;
25 extern unsigned int vx_debug_limit;
26 extern unsigned int vx_debug_dlim;
27 extern unsigned int vx_debug_cvirt;
/* printk prefixes for debug ("vxD:") and warning ("vxW:") messages */
30 #define VX_LOGLEVEL "vxD: "
31 #define VX_WARNLEVEL KERN_WARNING "vxW: "
/*
 * Conditional printk helpers (debug build; macro bodies are only
 * partially visible in this chunk):
 *   vxdprintk(c, fmt, ...) - debug message when condition c holds
 *   vxlprintk(c, fmt, ...) - as above, appending " @%s:%d" (file:line)
 *   vxfprintk(c, fmt, ...) - as above, appending " %s@%s:%d" (func@file:line)
 *   vxwprintk(c, fmt, ...) - warning-level message when c holds
 *   vxd_path(d, m)         - format dentry d / vfsmount m into a path
 *                            string via d_path(); the buffer is static,
 *                            so the result is not reentrant and is only
 *                            suitable for debug output
 * NOTE(review): vxlprintk/vxfprintk pass `x` without ##, so callers must
 * supply the trailing location arguments -- confirm against call sites.
 */
33 #define vxdprintk(c,f,x...) \
36 printk(VX_LOGLEVEL f "\n" , ##x); \
39 #define vxlprintk(c,f,x...) \
42 printk(VX_LOGLEVEL f " @%s:%d\n", x); \
45 #define vxfprintk(c,f,x...) \
48 printk(VX_LOGLEVEL f " %s@%s:%d\n", x); \
52 #define vxwprintk(c,f,x...) \
55 printk(VX_WARNLEVEL f "\n" , ##x); \
59 #define vxd_path(d,m) \
60 ({ static char _buffer[PATH_MAX]; \
61 d_path((d), (m), _buffer, sizeof(_buffer)); })
63 #else /* CONFIG_VSERVER_DEBUG */
/* debug disabled: all debug levels read as constant zero ... */
65 #define vx_debug_switch 0
66 #define vx_debug_xid 0
67 #define vx_debug_nid 0
68 #define vx_debug_net 0
69 #define vx_debug_limit 0
70 #define vx_debug_dlim 0
71 #define vx_debug_cvirt 0
/* ... and the printk helpers compile away to nothing */
73 #define vxdprintk(x...) do { } while (0)
74 #define vxlprintk(x...) do { } while (0)
75 #define vxfprintk(x...) do { } while (0)
76 #define vxwprintk(x...) do { } while (0)
/* debug disabled: vxd_path() keeps the same two-argument signature as
 * the debug variant but evaluates to a constant placeholder string.
 * (The original object-like `#define vxd_path "<none>"` would make any
 * call site `vxd_path(d, m)` expand to `"<none>"(d, m)` -- a syntax
 * error -- so the stub must be function-like.) */
#define vxd_path(d, m) "<none>"
80 #endif /* CONFIG_VSERVER_DEBUG */
85 #ifdef CONFIG_VSERVER_HISTORY
/* nonzero while history capture is active */
88 extern unsigned volatile int vxh_active;
/* event payload: pointer data for set/clear-style events */
97 struct _vxhe_set_clr {
/* event payload: id argument plus result for locate/lookup events */
101 struct _vxhe_loc_lookup {
/* one history record: vxi snapshot plus a per-event payload
 * (struct bodies are only partially visible in this chunk) */
125 struct _vx_hist_entry {
129 struct _vxhe_vxi vxi;
131 struct _vxhe_set_clr sc;
132 struct _vxhe_loc_lookup ll;
/* obtain the next history entry; loc identifies the call site */
136 struct _vx_hist_entry *vxh_advance(void *loc);
/* snapshot the vx_info identity (pointer, refcounts, xid) into a
 * history entry; reads but does not modify vxi */
145 static inline void __vxh_copy_vxi(struct _vx_hist_entry *entry, struct vx_info *vxi)
147 entry->vxi.ptr = vxi;
149 entry->vxi.usecnt = atomic_read(&vxi->vx_usecnt);
150 entry->vxi.tasks = atomic_read(&vxi->vx_tasks);
151 entry->vxi.xid = vxi->vx_id;
/*
 * __VXH_BODY(__type, __data): shared body for the generated vxh_*
 * helpers -- claims the next history entry via vxh_advance(), tags it
 * with __type, then runs __data to fill the payload.
 * __VXH_SIMPLE is the payload for events recording only the vxi
 * snapshot. (Macro bodies are only partially visible in this chunk.)
 */
156 #define __VXH_BODY(__type, __data) \
157 struct _vx_hist_entry *entry; \
160 entry = vxh_advance(VXH_HERE()); \
162 entry->type = __type; \
167 #define __VXH_SIMPLE \
168 __vxh_copy_vxi(entry, vxi)
/* generator: defines static inline __name(vxi) that records a __type
 * event with only the vxi snapshot as payload */
170 #define VXH_SIMPLE(__name, __type) \
171 static inline void __name(struct vx_info *vxi) \
173 __VXH_BODY(__type, __VXH_SIMPLE) \
176 /* pass vxi and data (void *) */
178 __vxh_copy_vxi(entry, vxi); \
179 entry->sc.data = data
/* generator: defines __name(vxi, data) that records a __type event
 * with the vxi snapshot plus a pointer payload (stored in sc.data) */
181 #define VXH_DATA(__name, __type) \
183 void __name(struct vx_info *vxi, void *data) \
185 __VXH_BODY(__type, __VXH_DATA) \
188 /* pass vxi and arg (long) */
/* __VXH_LARG payload (its #define line is not visible in this chunk):
 * vxi snapshot plus the long argument; VXH_LARG below generates
 * __name(vxi, arg) helpers from it */
190 __vxh_copy_vxi(entry, vxi); \
193 #define VXH_LARG(__name, __type) \
195 void __name(struct vx_info *vxi, long arg) \
197 __VXH_BODY(__type, __VXH_LARG) \
201 static inline void vxh_throw_oops(void)
/* record a VXH_THROW_OOPS event; the empty payload {} adds nothing
 * beyond the event type (body only partially visible in this chunk) */
203 __VXH_BODY(VXH_THROW_OOPS, {});
204 /* prevent further acquisition */
/* reference get/put events (vxi snapshot only) */
208 VXH_SIMPLE(vxh_get_vx_info, VXH_GET_VX_INFO);
209 VXH_SIMPLE(vxh_put_vx_info, VXH_PUT_VX_INFO);
/* init/set/clear and claim/release events carrying a data pointer */
211 VXH_DATA(vxh_init_vx_info, VXH_INIT_VX_INFO);
212 VXH_DATA(vxh_set_vx_info, VXH_SET_VX_INFO);
213 VXH_DATA(vxh_clr_vx_info, VXH_CLR_VX_INFO);
215 VXH_DATA(vxh_claim_vx_info, VXH_CLAIM_VX_INFO);
216 VXH_DATA(vxh_release_vx_info, VXH_RELEASE_VX_INFO);
/* allocation and hash-table events (vxi snapshot only) */
218 VXH_SIMPLE(vxh_alloc_vx_info, VXH_ALLOC_VX_INFO);
219 VXH_SIMPLE(vxh_dealloc_vx_info, VXH_DEALLOC_VX_INFO);
221 VXH_SIMPLE(vxh_hash_vx_info, VXH_HASH_VX_INFO);
222 VXH_SIMPLE(vxh_unhash_vx_info, VXH_UNHASH_VX_INFO);
/* locate/lookup/create events carrying a long id argument */
224 VXH_LARG(vxh_loc_vx_info, VXH_LOC_VX_INFO);
225 VXH_LARG(vxh_lookup_vx_info, VXH_LOOKUP_VX_INFO);
226 VXH_LARG(vxh_create_vx_info, VXH_CREATE_VX_INFO);
/* dump the collected history */
228 extern void vxh_dump_history(void);
231 #else /* CONFIG_VSERVER_HISTORY */
/* history disabled: every vxh_* hook compiles to a no-op with the
 * same arity as its debug counterpart */
234 #define vxh_throw_oops() do { } while (0)
236 #define vxh_get_vx_info(v) do { } while (0)
237 #define vxh_put_vx_info(v) do { } while (0)
239 #define vxh_init_vx_info(v,d) do { } while (0)
240 #define vxh_set_vx_info(v,d) do { } while (0)
241 #define vxh_clr_vx_info(v,d) do { } while (0)
243 #define vxh_claim_vx_info(v,d) do { } while (0)
244 #define vxh_release_vx_info(v,d) do { } while (0)
246 #define vxh_alloc_vx_info(v) do { } while (0)
247 #define vxh_dealloc_vx_info(v) do { } while (0)
249 #define vxh_hash_vx_info(v) do { } while (0)
250 #define vxh_unhash_vx_info(v) do { } while (0)
252 #define vxh_loc_vx_info(a,v) do { } while (0)
253 #define vxh_lookup_vx_info(a,v) do { } while (0)
254 #define vxh_create_vx_info(a,v) do { } while (0)
256 #define vxh_dump_history() do { } while (0)
259 #endif /* CONFIG_VSERVER_HISTORY */
/*
 * vxd_assert_lock(l): in debug builds, verify that spinlock l is held;
 * otherwise compiles to nothing.
 * (As chunked, the source defined vxd_assert_lock twice under a single
 * unterminated #ifdef -- a redefinition that would also unbalance the
 * header-guard #endif; restore the #ifdef/#else/#endif structure.)
 */
#ifdef CONFIG_VSERVER_DEBUG
#define vxd_assert_lock(l) assert_spin_locked(l)
#else
#define vxd_assert_lock(l) do { } while (0)
#endif
269 #endif /* _VX_DEBUG_H */