root/crypto/api.c

DEFINITIONS

This source file includes the following definitions.
  1. crypto_mod_get
  2. crypto_mod_put
  3. crypto_is_test_larval
  4. __crypto_alg_lookup
  5. crypto_larval_destroy
  6. crypto_larval_alloc
  7. crypto_larval_add
  8. crypto_larval_kill
  9. crypto_larval_wait
  10. crypto_alg_lookup
  11. crypto_larval_lookup
  12. crypto_probing_notify
  13. crypto_alg_mod_lookup
  14. crypto_init_ops
  15. crypto_exit_ops
  16. crypto_ctxsize
  17. crypto_shoot_alg
  18. __crypto_alloc_tfm
  19. crypto_alloc_base
  20. crypto_create_tfm
  21. crypto_find_alg
  22. crypto_alloc_tfm
  23. crypto_destroy_tfm
  24. crypto_has_alg
  25. crypto_req_done

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);
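
/*
 * Usage sketch (editorial, not part of the original file): crypto_mod_get()
 * pins both the algorithm refcount and its owning module; a NULL return
 * means the module is on its way out.  Every successful get must be paired
 * with crypto_mod_put(), which drops the algorithm reference before the
 * module reference so cra_destroy may still execute module code.
 * example_alg_is_usable() is a hypothetical caller.
 */
static inline bool example_alg_is_usable(struct crypto_alg *alg)
{
        if (!crypto_mod_get(alg))
                return false;   /* owning module is being unloaded */
        /* ... alg and its module are now safe to use ... */
        crypto_mod_put(alg);
        return true;
}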

/* Only test larvals, created by the crypto manager, carry a driver name. */
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
        return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;  /* below the larval priority of -1, so larvals can win */

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    !crypto_is_test_larval((struct crypto_larval *)q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}
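
/*
 * Illustration (editorial): the flag test above accepts a candidate only
 * when its flags agree with @type on every bit selected by @mask.  For
 * example, with type = CRYPTO_ALG_TYPE_SHASH and mask = CRYPTO_ALG_TYPE_MASK,
 * (q->cra_flags ^ type) & mask is zero exactly when q carries the shash
 * type bits.  A cra_driver_name match (e.g. "sha256-generic") wins
 * outright; otherwise the highest-priority entry whose cra_name matches
 * (e.g. "sha256") is returned.
 */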

static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (!IS_ERR_OR_NULL(larval->adult))
                crypto_mod_put(larval->adult);
        kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval))
                return ERR_CAST(larval);

        /* Two refs: one dropped by crypto_larval_kill(), one by crypto_larval_wait(). */
        refcount_set(&larval->alg.cra_refcnt, 2);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg) {
                kfree(larval);
                if (crypto_is_larval(alg))
                        alg = crypto_larval_wait(alg);
        }

        return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete_all(&larval->completion);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;
        long timeout;

        timeout = wait_for_completion_killable_timeout(
                &larval->completion, 60 * HZ);

        alg = larval->adult;
        if (timeout < 0)
                alg = ERR_PTR(-EINTR);
        else if (!timeout)
                alg = ERR_PTR(-ETIMEDOUT);
        else if (!alg)
                alg = ERR_PTR(-ENOENT);
        else if (IS_ERR(alg))
                ;
        else if (crypto_is_test_larval(larval) &&
                 !(alg->cra_flags & CRYPTO_ALG_TESTED))
                alg = ERR_PTR(-EAGAIN);
        else if (!crypto_mod_get(alg))
                alg = ERR_PTR(-EAGAIN);
        crypto_mod_put(&larval->alg);

        return alg;
}
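
/*
 * Editorial note on the error paths above: -EINTR means the caller took a
 * fatal signal while waiting, -ETIMEDOUT that no implementation
 * materialised within 60 seconds, -ENOENT that the larval completed
 * without an adult, and -EAGAIN either that the adult has not (yet)
 * passed its self-tests or that its module reference could not be taken;
 * the allocation loops below simply retry on -EAGAIN.
 */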

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        u32 test = 0;

        if (!((type | mask) & CRYPTO_ALG_TESTED))
                test |= CRYPTO_ALG_TESTED;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type | test, mask | test);
        if (!alg && test) {
                alg = __crypto_alg_lookup(name, type, mask);
                if (alg && !crypto_is_larval(alg)) {
                        /* Test failed */
                        crypto_mod_put(alg);
                        alg = ERR_PTR(-ELIBBAD);
                }
        }
        up_read(&crypto_alg_sem);

        return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
                                               u32 mask)
{
        struct crypto_alg *alg;

        if (!name)
                return ERR_PTR(-ENOENT);

        type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

        alg = crypto_alg_lookup(name, type, mask);
        if (!alg && !(mask & CRYPTO_NOLOAD)) {
                request_module("crypto-%s", name);

                if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
                      CRYPTO_ALG_NEED_FALLBACK))
                        request_module("crypto-%s-all", name);

                alg = crypto_alg_lookup(name, type, mask);
        }

        if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
                alg = crypto_larval_wait(alg);
        else if (!alg)
                alg = crypto_larval_add(name, type, mask);

        return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
        int ok;

        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        }

        return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        /*
         * If the internal flag is set for a cipher, require the caller to
         * invoke the cipher with the internal flag to use that cipher.
         * Also, if a caller wants to allocate a cipher that may or may
         * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
         * !(mask & CRYPTO_ALG_INTERNAL).
         */
        if (!((type | mask) & CRYPTO_ALG_INTERNAL))
                mask |= CRYPTO_ALG_INTERNAL;

        larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
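
/*
 * Illustration (editorial): a caller prepared to accept either an internal
 * or a public implementation would follow the comment above and pass
 *
 *      crypto_alg_mod_lookup(name, type | CRYPTO_ALG_INTERNAL,
 *                            mask & ~CRYPTO_ALG_INTERNAL);
 *
 * leaving the INTERNAL bit out of @mask so it is ignored during matching.
 * An ordinary caller passes neither bit; the code above then ORs
 * CRYPTO_ALG_INTERNAL into @mask, keeping internal-only ciphers hidden.
 */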

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

        if (type_obj)
                return type_obj->init(tfm, type, mask);

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_ops(tfm);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);

        default:
                break;
        }

        BUG();
        return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type && tfm->exit)
                tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}
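
/*
 * Worked example (editorial, illustrative values): assume
 * crypto_tfm_ctx_alignment() returns 8.  An algorithm with
 * cra_alignmask = 15, i.e. a 16-byte alignment requirement, reserves
 * 15 & ~7 = 8 bytes of slack so the context pointer can be rounded up
 * to the next 16-byte boundary; with cra_alignmask = 7 the allocation
 * is already sufficiently aligned and 7 & ~7 = 0 bytes are added.
 */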

static void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;

        err = crypto_init_ops(tfm, type, mask);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *      crypto_alloc_base - Locate algorithm and allocate transform
 *      @alg_name: Name of algorithm
 *      @type: Type of algorithm
 *      @mask: Mask for type comparison
 *
 *      This function should not be used by new algorithm types.
 *      Please use crypto_alloc_tfm instead.
 *
 *      crypto_alloc_base() will first attempt to locate an already loaded
 *      algorithm.  If that fails and the kernel supports dynamically loadable
 *      modules, it will then attempt to load a module of the same name or
 *      alias.  If that fails it will send a query to any loaded crypto manager
 *      to construct an algorithm on the fly.  A refcount is grabbed on the
 *      algorithm which is then associated with the new transform.
 *
 *      The returned transform is of an indeterminate type.  Most people
 *      should use one of the more specific allocation functions such as
 *      crypto_alloc_blkcipher.
 *
 *      In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
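
/*
 * Usage sketch (editorial): allocate a transform by name and release it
 * again; "aes" is only an illustrative algorithm name.  crypto_free_tfm()
 * is the inline wrapper around crypto_destroy_tfm() from <linux/crypto.h>.
 */
static int example_alloc_base(void)
{
        struct crypto_tfm *tfm;

        tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
                                CRYPTO_ALG_TYPE_MASK);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        /* ... use the transform through the single-block cipher API ... */

        crypto_free_tfm(tfm);
        return 0;
}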

void *crypto_create_tfm(struct crypto_alg *alg,
                        const struct crypto_type *frontend)
{
        char *mem;
        struct crypto_tfm *tfm = NULL;
        unsigned int tfmsize;
        unsigned int total;
        int err = -ENOMEM;

        tfmsize = frontend->tfmsize;
        total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

        mem = kzalloc(total, GFP_KERNEL);
        if (mem == NULL)
                goto out_err;

        tfm = (struct crypto_tfm *)(mem + tfmsize);
        tfm->__crt_alg = alg;

        err = frontend->init_tfm(tfm);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(mem);
out_err:
        mem = ERR_PTR(err);
out:
        return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
                                   const struct crypto_type *frontend,
                                   u32 type, u32 mask)
{
        if (frontend) {
                type &= frontend->maskclear;
                mask &= frontend->maskclear;
                type |= frontend->type;
                mask |= frontend->maskset;
        }

        return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *      crypto_alloc_tfm - Locate algorithm and allocate transform
 *      @alg_name: Name of algorithm
 *      @frontend: Frontend algorithm type
 *      @type: Type of algorithm
 *      @mask: Mask for type comparison
 *
 *      crypto_alloc_tfm() will first attempt to locate an already loaded
 *      algorithm.  If that fails and the kernel supports dynamically loadable
 *      modules, it will then attempt to load a module of the same name or
 *      alias.  If that fails it will send a query to any loaded crypto manager
 *      to construct an algorithm on the fly.  A refcount is grabbed on the
 *      algorithm which is then associated with the new transform.
 *
 *      The returned transform is of an indeterminate type.  Most people
 *      should use one of the more specific allocation functions such as
 *      crypto_alloc_blkcipher.
 *
 *      In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
                       const struct crypto_type *frontend, u32 type, u32 mask)
{
        void *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_find_alg(alg_name, frontend, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = crypto_create_tfm(alg, frontend);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
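
/*
 * Usage sketch (editorial): the type-specific allocators are thin wrappers
 * around crypto_alloc_tfm().  crypto_alloc_shash() from <crypto/hash.h>,
 * for instance, passes in the shash frontend and returns the result as a
 * struct crypto_shash; "sha256" is only an illustrative name.
 */
static int example_alloc_shash(void)
{
        struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);

        if (IS_ERR(tfm))
                return PTR_ERR(tfm);
        crypto_free_shash(tfm);
        return 0;
}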

/*
 *      crypto_destroy_tfm - Free crypto transform
 *      @mem: Start of tfm slab
 *      @tfm: Transform to free
 *
 *      This function frees up the transform and any associated resources,
 *      then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;

        if (unlikely(!mem))
                return;

        alg = tfm->__crt_alg;

        if (!tfm->exit && alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
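
/*
 * Usage sketch (editorial): probe for an implementation without keeping a
 * reference; like the allocators, this may trigger module auto-loading.
 * "gcm(aes)" is only an illustrative name.
 */
static bool example_have_gcm_aes(void)
{
        return crypto_has_alg("gcm(aes)", 0, 0);
}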

void crypto_req_done(struct crypto_async_request *req, int err)
{
        struct crypto_wait *wait = req->data;

        if (err == -EINPROGRESS)
                return;

        wait->err = err;
        complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
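
/*
 * Usage sketch (editorial, assumes <crypto/hash.h>): crypto_req_done() is
 * the completion callback behind the DECLARE_CRYPTO_WAIT() and
 * crypto_wait_req() helpers, which turn an asynchronous request into a
 * synchronous call; crypto_wait_req() sleeps on -EINPROGRESS or -EBUSY
 * and passes any other return value straight through.
 */
static int example_sync_digest(struct ahash_request *req)
{
        DECLARE_CRYPTO_WAIT(wait);

        ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                   crypto_req_done, &wait);
        return crypto_wait_req(crypto_ahash_digest(req), &wait);
}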

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");
