/* context state changes */

/* highest usable (static) context id; ids are 16 bit */
#define MAX_S_CONTEXT	65535	/* Arbitrary limit */

#ifdef	CONFIG_VSERVER_DYNAMIC_IDS
#define MIN_D_CONTEXT	49152	/* dynamic contexts start here */
#else
/* no dynamic ids: place the dynamic range above all valid ids */
#define MIN_D_CONTEXT	65536
#endif
/* check conditions */

#define VS_ADMIN	0x0001	/* pass for the admin context (cid 0) */
#define VS_WATCH	0x0002	/* pass for the watch context (cid 1) */
#define VS_HIDE		0x0004
#define VS_HOSTID	0x0008	/* pass when the checked id is 0 */

#define VS_IDENT	0x0010	/* id must equal the caller's id */
#define VS_EQUIV	0x0020
#define VS_PARENT	0x0040
#define VS_CHILD	0x0080

#define VS_ARG_MASK	0x00F0	/* id-relation checks against the argument */

#define VS_DYNAMIC	0x0100	/* accept ids in the dynamic range */
#define VS_STATIC	0x0200	/* accept ids in the static range (>1) */

#define VS_ATR_MASK	0x0F00	/* attribute (id-range) checks */

#ifdef	CONFIG_VSERVER_PRIVACY
/* privacy mode: admin/watch contexts get no implicit visibility */
#define VS_ADMIN_P	(0)
#define VS_WATCH_P	(0)
#else
#define VS_ADMIN_P	VS_ADMIN
#define VS_WATCH_P	VS_WATCH
#endif

#define VS_HARDIRQ	0x1000	/* pass when in hard interrupt */
#define VS_SOFTIRQ	0x2000	/* pass when in soft interrupt */
#define VS_IRQ		0x4000	/* pass when in any interrupt context */

#define VS_IRQ_MASK	0xF000	/* interrupt-context checks */
57 #include <linux/hardirq.h>
60 * check current context for ADMIN/WATCH and
61 * optionally against supplied argument
63 static inline int __vs_check(int cid, int id, unsigned int mode)
65 if (mode & VS_ARG_MASK) {
66 if ((mode & VS_IDENT) &&
70 if (mode & VS_ATR_MASK) {
71 if ((mode & VS_DYNAMIC) &&
72 (id >= MIN_D_CONTEXT) &&
73 (id <= MAX_S_CONTEXT))
75 if ((mode & VS_STATIC) &&
76 (id > 1) && (id < MIN_D_CONTEXT))
79 if (mode & VS_IRQ_MASK) {
80 if ((mode & VS_IRQ) && unlikely(in_interrupt()))
82 if ((mode & VS_HARDIRQ) && unlikely(in_irq()))
84 if ((mode & VS_SOFTIRQ) && unlikely(in_softirq()))
87 return (((mode & VS_ADMIN) && (cid == 0)) ||
88 ((mode & VS_WATCH) && (cid == 1)) ||
89 ((mode & VS_HOSTID) && (id == 0)));
/* context (xid) accessors and checks */

#define vx_task_xid(t)	((t)->xid)

#define vx_current_xid() vx_task_xid(current)

#define current_vx_info() (current->vx_info)

/* check id c against the current xid; interrupt context always passes */
#define vx_check(c,m)	__vs_check(vx_current_xid(),c,(m)|VS_IRQ)

/* like vx_check(), but a zero mode trivially succeeds */
#define vx_weak_check(c,m)	((m) ? vx_check(c,m) : 1)

/* network context (nid) accessors and checks */

#define nx_task_nid(t)	((t)->nid)

#define nx_current_nid() nx_task_nid(current)

#define current_nx_info() (current->nx_info)

#define nx_check(c,m)	__vs_check(nx_current_nid(),c,m)

#define nx_weak_check(c,m)	((m) ? nx_check(c,m) : 1)
/* generic flag merging */

/* non-zero iff the m-masked bits of v differ from f */
#define vs_check_flags(v,m,f)	(((v) & (m)) ^ (f))

/* replace the m-masked bits of v with those from f */
#define vs_mask_flags(v,f,m)	(((v) & ~(m)) | ((f) & (m)))

/* keep only the m-masked bits of v that are also set in f */
#define vs_mask_mask(v,f,m)	(((v) & ~(m)) | ((v) & (f) & (m)))

/* test bit n of v (64-bit safe via 1LL) */
#define vs_check_bit(v,n)	((v) & (1LL << (n)))

/* context flags (a NULL vx_info reads as no flags) */
#define __vx_flags(v)	((v) ? (v)->vx_flags : 0)

#define vx_current_flags()	__vx_flags(current->vx_info)

#define vx_info_flags(v,m,f) \
	vs_check_flags(__vx_flags(v),(m),(f))

#define task_vx_flags(t,m,f) \
	((t) && vx_info_flags((t)->vx_info, (m), (f)))

#define vx_flags(m,f)	vx_info_flags(current->vx_info,(m),(f))
/* context capabilities (a NULL vx_info grants none) */

#define __vx_ccaps(v)	((v) ? (v)->vx_ccaps : 0)

#define vx_current_ccaps()	__vx_ccaps(current->vx_info)

#define vx_info_ccaps(v,c)	(__vx_ccaps(v) & (c))

#define vx_ccaps(c)	vx_info_ccaps(current->vx_info,(c))

/* network flags (a NULL nx_info reads as no flags) */

#define __nx_flags(v)	((v) ? (v)->nx_flags : 0)

#define nx_current_flags()	__nx_flags(current->nx_info)

#define nx_info_flags(v,m,f) \
	vs_check_flags(__nx_flags(v),(m),(f))

#define task_nx_flags(t,m,f) \
	((t) && nx_info_flags((t)->nx_info, (m), (f)))

#define nx_flags(m,f)	nx_info_flags(current->nx_info,(m),(f))

/* network capabilities (a NULL nx_info grants none) */

#define __nx_ncaps(v)	((v) ? (v)->nx_ncaps : 0)

#define nx_current_ncaps()	__nx_ncaps(current->nx_info)

#define nx_info_ncaps(v,c)	(__nx_ncaps(v) & (c))

#define nx_ncaps(c)	nx_info_ncaps(current->nx_info,(c))
/* context mask capabilities */

/* upper 32 bits of ccaps; a NULL info masks nothing (~0) */
#define __vx_mcaps(v)	((v) ? (v)->vx_ccaps >> 32UL : ~0 )

#define vx_info_mcaps(v,c)	(__vx_mcaps(v) & (c))

#define vx_mcaps(c)	vx_info_mcaps(current->vx_info,(c))

/* context bcap mask; a NULL info masks nothing (~0) */

#define __vx_bcaps(v)	((v) ? (v)->vx_bcaps : ~0 )

#define vx_current_bcaps()	__vx_bcaps(current->vx_info)

#define vx_info_bcaps(v,c)	(__vx_bcaps(v) & (c))

#define vx_bcaps(c)	vx_info_bcaps(current->vx_info,(c))

/* per-context capability bounding set; global cap_bset when no info */
#define vx_info_cap_bset(v)	((v) ? (v)->vx_cap_bset : cap_bset)

#define vx_current_cap_bset()	vx_info_cap_bset(current->vx_info)

/* bcap mask is NOT applied while the context is in SETUP state */
#define __vx_info_mbcap(v,b) \
	(!vx_info_flags(v, VXF_STATE_SETUP, 0) ? \
	vx_info_bcaps(v, b) : (b))

#define vx_info_mbcap(v,b)	__vx_info_mbcap(v,cap_t(b))

#define task_vx_mbcap(t,b) \
	vx_info_mbcap((t)->vx_info, (t)->b)

#define vx_mbcap(b)	task_vx_mbcap(current,b)

#define vx_cap_raised(v,c,f)	(vx_info_mbcap(v,c) & CAP_TO_MASK(f))

/* capable(b), or effective cap b plus context capability c */
#define vx_capable(b,c) (capable(b) || \
	(cap_raised(current->cap_effective,b) && vx_ccaps(c)))

/* true iff the current context's init pid equals n */
#define vx_current_initpid(n) \
	(current->vx_info && \
	(current->vx_info->vx_initpid == (n)))

/* context / network context state bits (0 for a NULL info) */

#define __vx_state(v)	((v) ? ((v)->vx_state) : 0)

#define vx_info_state(v,m)	(__vx_state(v) & (m))

#define __nx_state(v)	((v) ? ((v)->nx_state) : 0)

#define nx_info_state(v,m)	(__nx_state(v) & (m))