Lines matching refs: nx_ctx
39 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base); in gcm_aes_nx_set_key() local
40 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in gcm_aes_nx_set_key()
41 struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead; in gcm_aes_nx_set_key()
43 nx_ctx_init(nx_ctx, HCOP_FC_AES); in gcm_aes_nx_set_key()
49 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128]; in gcm_aes_nx_set_key()
54 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192]; in gcm_aes_nx_set_key()
59 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256]; in gcm_aes_nx_set_key()
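
The gcm_aes_nx_set_key() lines above (apparently from the Linux NX, i.e. Power nest-accelerator, AES-GCM driver) bind the transform's nx_ctx to the AES coprocessor function and point nx_ctx->ap at the property set matching the supplied key length, presumably switching on the 16/24/32-byte AES key sizes. A minimal stand-alone sketch of that dispatch, using simplified hypothetical stand-in types rather than the real driver headers:

/* Hypothetical, simplified stand-ins for the driver structures. */
enum { NX_PROPS_AES_128, NX_PROPS_AES_192, NX_PROPS_AES_256, NX_PROPS_AES_MAX };

struct props_sketch {
    unsigned int sglen;        /* max scatter-gather entries */
    unsigned int databytelen;  /* max bytes per operation */
};

struct ctx_sketch {
    struct props_sketch props[NX_PROPS_AES_MAX];
    struct props_sketch *ap;   /* active properties for the chosen key size */
};

/* Mirror of the three nx_ctx->ap assignments listed above. */
static int select_key_props(struct ctx_sketch *ctx, unsigned int key_len)
{
    switch (key_len) {
    case 16: ctx->ap = &ctx->props[NX_PROPS_AES_128]; break;
    case 24: ctx->ap = &ctx->props[NX_PROPS_AES_192]; break;
    case 32: ctx->ap = &ctx->props[NX_PROPS_AES_256]; break;
    default: return -1;        /* unsupported key length */
    }
    return 0;
}
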
78 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base); in gcm4106_aes_nx_set_key() local
79 char *nonce = nx_ctx->priv.gcm.nonce; in gcm4106_aes_nx_set_key()
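
gcm4106_aes_nx_set_key() keeps a nonce in nx_ctx->priv.gcm.nonce; for rfc4106(gcm(aes)) the last four bytes of the key material conventionally act as that salt/nonce while the remainder is the AES key. A hedged, self-contained sketch of the split (rfc4106_split_key() is a hypothetical name, not a driver function):

#include <string.h>

/* rfc4106 convention: key material = AES key || 4-byte salt (nonce). */
static int rfc4106_split_key(char nonce_out[4], const unsigned char *key,
                             unsigned int key_len)
{
    if (key_len <= 4)
        return -1;
    memcpy(nonce_out, key + key_len - 4, 4);  /* keep the salt */
    return 0;            /* the first key_len - 4 bytes remain the AES key */
}
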
124 static int nx_gca(struct nx_crypto_ctx *nx_ctx, in nx_gca() argument
129 struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead; in nx_gca()
131 struct nx_sg *nx_sg = nx_ctx->in_sg; in nx_gca()
147 nx_ctx->ap->sglen); in nx_gca()
149 nx_ctx->ap->databytelen/NX_PAGE_SIZE); in nx_gca()
157 nx_ctx->ap->databytelen); in nx_gca()
161 nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len, in nx_gca()
169 nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_sg) in nx_gca()
172 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op_aead, in nx_gca()
182 atomic_inc(&(nx_ctx->stats->aes_ops)); in nx_gca()
183 atomic64_add(req->assoclen, &(nx_ctx->stats->aes_bytes)); in nx_gca()
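
nx_gca() walks the associated data into nx_ctx->in_sg, sizes op_aead.inlen from scatter-gather pointer arithmetic, fires the operation through nx_hcall_sync() and accounts it in the aes_ops/aes_bytes counters. A sketch of just the length computation, with hypothetical stand-in types and assuming the list builder returns a pointer one past the last descriptor it filled:

#include <stddef.h>
#include <stdint.h>

/* Hypothetical miniature of a scatter-gather descriptor. */
struct sg_sketch {
    uint64_t addr;
    uint32_t len;
};

/*
 * If the builder returns the next free slot, the byte length handed to the
 * coprocessor is the number of filled descriptors times their size.
 */
static size_t sg_list_bytes(const struct sg_sketch *base,
                            const struct sg_sketch *next_free)
{
    return (size_t)(next_free - base) * sizeof(struct sg_sketch);
}
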
196 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); in gmac() local
197 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in gmac()
210 nx_ctx->ap->sglen); in gmac()
212 nx_ctx->ap->databytelen/NX_PAGE_SIZE); in gmac()
223 nx_ctx->ap->databytelen); in gmac()
227 nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len, in gmac()
235 nx_ctx->op.inlen = (nx_ctx->in_sg - nx_sg) in gmac()
241 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, in gmac()
253 atomic_inc(&(nx_ctx->stats->aes_ops)); in gmac()
254 atomic64_add(req->assoclen, &(nx_ctx->stats->aes_bytes)); in gmac()
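
gmac() repeats the same pattern for processing the associated data alone; both it and nx_gca() first clamp the scatter-gather length against nx_ctx->ap->sglen and against how many pages fit in nx_ctx->ap->databytelen. A sketch of that double clamp (the names and the 4 KiB page size are assumptions):

#include <stddef.h>

#define NX_PAGE_SIZE_SKETCH 4096u   /* assumption: 4 KiB NX pages */

/* Limit the SG list by the hardware sglen property and by databytelen/page. */
static size_t clamp_max_sg_len(size_t driver_limit, size_t ap_sglen,
                               size_t ap_databytelen)
{
    size_t max_sg_len = driver_limit < ap_sglen ? driver_limit : ap_sglen;
    size_t by_pages = ap_databytelen / NX_PAGE_SIZE_SKETCH;

    return max_sg_len < by_pages ? max_sg_len : by_pages;
}
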
269 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); in gcm_empty() local
270 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in gcm_empty()
292 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) desc->info, in gcm_empty()
293 &len, nx_ctx->ap->sglen); in gcm_empty()
299 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) out, &len, in gcm_empty()
300 nx_ctx->ap->sglen); in gcm_empty()
305 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in gcm_empty()
306 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); in gcm_empty()
308 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, in gcm_empty()
312 atomic_inc(&(nx_ctx->stats->aes_ops)); in gcm_empty()
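
gcm_empty() covers the degenerate request with no plaintext and no associated data. In that case GHASH runs over nothing but the all-zero length block, so its output is zero and the GCM tag collapses to the encryption of the initial counter block J0; the single in_sg/out_sg pair over desc->info and a local output buffer above is enough to compute it. A sketch with an injected block-cipher callback (gcm_tag_for_empty_input() and the callback type are hypothetical):

#include <stdint.h>
#include <string.h>

/* Caller-supplied single-block AES encryption from any AES library. */
typedef void (*aes_encrypt_block_fn)(const void *key_sched,
                                     const uint8_t in[16], uint8_t out[16]);

/* With empty plaintext and AAD, GHASH yields zero, so tag = E_K(J0). */
static void gcm_tag_for_empty_input(const void *key_sched,
                                    const uint8_t j0[16],
                                    aes_encrypt_block_fn encrypt,
                                    uint8_t tag[16])
{
    uint8_t ek_j0[16];

    encrypt(key_sched, j0, ek_j0);
    memcpy(tag, ek_j0, sizeof(ek_j0));
}
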
332 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); in gcm_aes_nx_crypt() local
334 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; in gcm_aes_nx_crypt()
341 spin_lock_irqsave(&nx_ctx->lock, irq_flags); in gcm_aes_nx_crypt()
361 rc = nx_gca(nx_ctx, req, csbcpb->cpb.aes_gcm.in_pat_or_aad); in gcm_aes_nx_crypt()
380 rc = nx_build_sg_lists(nx_ctx, &desc, req->dst, in gcm_aes_nx_crypt()
393 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, in gcm_aes_nx_crypt()
406 atomic_inc(&(nx_ctx->stats->aes_ops)); in gcm_aes_nx_crypt()
408 &(nx_ctx->stats->aes_bytes)); in gcm_aes_nx_crypt()
421 u8 *itag = nx_ctx->priv.gcm.iauth_tag; in gcm_aes_nx_crypt()
432 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); in gcm_aes_nx_crypt()
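
gcm_aes_nx_crypt() serializes the whole operation with spin_lock_irqsave() on nx_ctx->lock, delegates the associated data to nx_gca(), loops over nx_build_sg_lists()/nx_hcall_sync(), and on decryption checks the tag it computed into nx_ctx->priv.gcm.iauth_tag against the one supplied with the ciphertext. The listing does not show how that comparison is made; a constant-time compare such as the hypothetical helper below is the usual choice, with -EBADMSG as the conventional AEAD authentication-failure code:

#include <errno.h>
#include <stddef.h>
#include <stdint.h>

/* Compare the computed tag with the received one without early exit. */
static int check_auth_tag(const uint8_t *computed, const uint8_t *received,
                          size_t len)
{
    uint8_t diff = 0;
    size_t i;

    for (i = 0; i < len; i++)
        diff |= computed[i] ^ received[i];

    return diff ? -EBADMSG : 0;   /* mismatch means authentication failed */
}
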
458 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); in gcm4106_aes_nx_encrypt() local
461 char *nonce = nx_ctx->priv.gcm.nonce; in gcm4106_aes_nx_encrypt()
471 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); in gcm4106_aes_nx_decrypt() local
474 char *nonce = nx_ctx->priv.gcm.nonce; in gcm4106_aes_nx_decrypt()
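
Finally, gcm4106_aes_nx_encrypt() and gcm4106_aes_nx_decrypt() both pull the stored nonce back out of nx_ctx->priv.gcm.nonce; per RFC 4106 the 96-bit GCM IV is that 4-byte salt followed by the 8-byte explicit IV carried with each request. A hedged sketch (rfc4106_build_iv() is a hypothetical name):

#include <string.h>

/* 96-bit GCM IV for rfc4106: 4-byte salt from the key || 8-byte explicit IV. */
static void rfc4106_build_iv(char iv_out[12], const char salt[4],
                             const char explicit_iv[8])
{
    memcpy(iv_out, salt, 4);
    memcpy(iv_out + 4, explicit_iv, 8);
}
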