((__u32)(mech) | \
((__u32)(svc) << (FLVR_SVC_OFFSET - FLVR_MECH_OFFSET)))
+#define SPTLRPC_SUBFLVR_GSSNULL \
+ MAKE_BASE_SUBFLVR(SPTLRPC_MECH_GSS_NULL, SPTLRPC_SVC_NULL) /* base sub-flavor: GSS null mechanism, null service */
#define SPTLRPC_SUBFLVR_KRB5N \
MAKE_BASE_SUBFLVR(SPTLRPC_MECH_GSS_KRB5, SPTLRPC_SVC_NULL)
#define SPTLRPC_SUBFLVR_KRB5A \
SPTLRPC_SVC_NULL, \
SPTLRPC_BULK_HASH, \
SPTLRPC_BULK_SVC_INTG)
+#define SPTLRPC_FLVR_GSSNULL \
+ MAKE_FLVR(SPTLRPC_POLICY_GSS, \
+ SPTLRPC_MECH_GSS_NULL, \
+ SPTLRPC_SVC_NULL, \
+ SPTLRPC_BULK_DEFAULT, \
+ SPTLRPC_BULK_SVC_NULL) /* full flavor: GSS policy + null mech/svc, default bulk with no bulk service */
#define SPTLRPC_FLVR_KRB5N \
MAKE_FLVR(SPTLRPC_POLICY_GSS, \
SPTLRPC_MECH_GSS_KRB5, \
PTLRPC_CTX_ERROR)
struct ptlrpc_cli_ctx {
- cfs_hlist_node_t cc_cache; /* linked into ctx cache */
- cfs_atomic_t cc_refcount;
- struct ptlrpc_sec *cc_sec;
- struct ptlrpc_ctx_ops *cc_ops;
+ cfs_hlist_node_t cc_cache; /* linked into ctx cache */
+ atomic_t cc_refcount;
+ struct ptlrpc_sec *cc_sec;
+ struct ptlrpc_ctx_ops *cc_ops;
cfs_time_t cc_expire; /* in seconds */
unsigned int cc_early_expire:1;
unsigned long cc_flags;
* \see sptlrpc_import_sec_adapt().
*/
struct ptlrpc_sec {
- struct ptlrpc_sec_policy *ps_policy;
- cfs_atomic_t ps_refcount;
- /** statistic only */
- cfs_atomic_t ps_nctx;
- /** unique identifier */
- int ps_id;
+ struct ptlrpc_sec_policy *ps_policy;
+ atomic_t ps_refcount;
+ /** statistic only */
+ atomic_t ps_nctx;
+ /** unique identifier */
+ int ps_id;
struct sptlrpc_flavor ps_flvr;
enum lustre_sec_part ps_part;
/** after set, no more new context will be created */
struct ptlrpc_svc_ctx {
- cfs_atomic_t sc_refcount;
- struct ptlrpc_sec_policy *sc_policy;
+ atomic_t sc_refcount; /* reference count (converted cfs_atomic_t -> kernel atomic_t by this patch) */
+ struct ptlrpc_sec_policy *sc_policy; /* NOTE(review): presumably the policy that owns this ctx — confirm against sec code */
};
/*
int sptlrpc_unpack_user_desc(struct lustre_msg *req, int offset, int swabbed);
-#define CFS_CAP_CHOWN_MASK (1 << CAP_CHOWN)
-#define CFS_CAP_SYS_RESOURCE_MASK (1 << CFS_CAP_SYS_RESOURCE)
+#define CFS_CAP_CHOWN_MASK (1 << CFS_CAP_CHOWN) /* shift by the CFS_CAP_* constant, not the raw kernel CAP_* value */
+#define CFS_CAP_SYS_RESOURCE_MASK (1 << CFS_CAP_SYS_RESOURCE) /* bitmask form of CFS_CAP_SYS_RESOURCE */
enum {
LUSTRE_SEC_NONE = 0,