about | summary | refs | log | tree | commit | diff
path: root/crypto
diff options
context:
space:
mode:
Diffstat (limited to 'crypto')
-rw-r--r--crypto/cryptd.c133
1 files changed, 19 insertions, 114 deletions
diff --git a/crypto/cryptd.c b/crypto/cryptd.c
index 243677bd9..85a9e1b49 100644
--- a/crypto/cryptd.c
+++ b/crypto/cryptd.c
@@ -22,7 +22,6 @@
#include <crypto/internal/aead.h>
#include <crypto/cryptd.h>
#include <crypto/crypto_wq.h>
-#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
@@ -32,7 +31,7 @@
#include <linux/sched.h>
#include <linux/slab.h>
-#define CRYPTD_MAX_CPU_QLEN 1000
+#define CRYPTD_MAX_CPU_QLEN 100
struct cryptd_cpu_queue {
struct crypto_queue queue;
@@ -59,7 +58,6 @@ struct aead_instance_ctx {
};
struct cryptd_blkcipher_ctx {
- atomic_t refcnt;
struct crypto_blkcipher *child;
};
@@ -68,7 +66,6 @@ struct cryptd_blkcipher_request_ctx {
};
struct cryptd_hash_ctx {
- atomic_t refcnt;
struct crypto_shash *child;
};
@@ -78,7 +75,6 @@ struct cryptd_hash_request_ctx {
};
struct cryptd_aead_ctx {
- atomic_t refcnt;
struct crypto_aead *child;
};
@@ -122,29 +118,11 @@ static int cryptd_enqueue_request(struct cryptd_queue *queue,
{
int cpu, err;
struct cryptd_cpu_queue *cpu_queue;
- struct crypto_tfm *tfm;
- atomic_t *refcnt;
- bool may_backlog;
cpu = get_cpu();
cpu_queue = this_cpu_ptr(queue->cpu_queue);
err = crypto_enqueue_request(&cpu_queue->queue, request);
-
- refcnt = crypto_tfm_ctx(request->tfm);
- may_backlog = request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG;
-
- if (err == -EBUSY && !may_backlog)
- goto out_put_cpu;
-
queue_work_on(cpu, kcrypto_wq, &cpu_queue->work);
-
- if (!atomic_read(refcnt))
- goto out_put_cpu;
-
- tfm = request->tfm;
- atomic_inc(refcnt);
-
-out_put_cpu:
put_cpu();
return err;
@@ -229,10 +207,7 @@ static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
unsigned int len))
{
struct cryptd_blkcipher_request_ctx *rctx;
- struct cryptd_blkcipher_ctx *ctx;
- struct crypto_ablkcipher *tfm;
struct blkcipher_desc desc;
- int refcnt;
rctx = ablkcipher_request_ctx(req);
@@ -248,16 +223,9 @@ static void cryptd_blkcipher_crypt(struct ablkcipher_request *req,
req->base.complete = rctx->complete;
out:
- tfm = crypto_ablkcipher_reqtfm(req);
- ctx = crypto_ablkcipher_ctx(tfm);
- refcnt = atomic_read(&ctx->refcnt);
-
local_bh_disable();
rctx->complete(&req->base, err);
local_bh_enable();
-
- if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
- crypto_free_ablkcipher(tfm);
}
static void cryptd_blkcipher_encrypt(struct crypto_async_request *req, int err)
@@ -479,21 +447,6 @@ static int cryptd_hash_enqueue(struct ahash_request *req,
return cryptd_enqueue_request(queue, &req->base);
}
-static void cryptd_hash_complete(struct ahash_request *req, int err)
-{
- struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
- struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
- struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
- int refcnt = atomic_read(&ctx->refcnt);
-
- local_bh_disable();
- rctx->complete(&req->base, err);
- local_bh_enable();
-
- if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
- crypto_free_ahash(tfm);
-}
-
static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
{
struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
@@ -513,7 +466,9 @@ static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
req->base.complete = rctx->complete;
out:
- cryptd_hash_complete(req, err);
+ local_bh_disable();
+ rctx->complete(&req->base, err);
+ local_bh_enable();
}
static int cryptd_hash_init_enqueue(struct ahash_request *req)
@@ -536,7 +491,9 @@ static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
req->base.complete = rctx->complete;
out:
- cryptd_hash_complete(req, err);
+ local_bh_disable();
+ rctx->complete(&req->base, err);
+ local_bh_enable();
}
static int cryptd_hash_update_enqueue(struct ahash_request *req)
@@ -557,7 +514,9 @@ static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
req->base.complete = rctx->complete;
out:
- cryptd_hash_complete(req, err);
+ local_bh_disable();
+ rctx->complete(&req->base, err);
+ local_bh_enable();
}
static int cryptd_hash_final_enqueue(struct ahash_request *req)
@@ -578,7 +537,9 @@ static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
req->base.complete = rctx->complete;
out:
- cryptd_hash_complete(req, err);
+ local_bh_disable();
+ rctx->complete(&req->base, err);
+ local_bh_enable();
}
static int cryptd_hash_finup_enqueue(struct ahash_request *req)
@@ -605,7 +566,9 @@ static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
req->base.complete = rctx->complete;
out:
- cryptd_hash_complete(req, err);
+ local_bh_disable();
+ rctx->complete(&req->base, err);
+ local_bh_enable();
}
static int cryptd_hash_digest_enqueue(struct ahash_request *req)
@@ -703,10 +666,6 @@ static void cryptd_aead_crypt(struct aead_request *req,
int (*crypt)(struct aead_request *req))
{
struct cryptd_aead_request_ctx *rctx;
- struct cryptd_aead_ctx *ctx;
- struct crypto_aead *tfm;
- int refcnt;
-
rctx = aead_request_ctx(req);
if (unlikely(err == -EINPROGRESS))
@@ -714,18 +673,10 @@ static void cryptd_aead_crypt(struct aead_request *req,
aead_request_set_tfm(req, child);
err = crypt( req );
req->base.complete = rctx->complete;
-
out:
- tfm = crypto_aead_reqtfm(req);
- ctx = crypto_aead_ctx(tfm);
- refcnt = atomic_read(&ctx->refcnt);
-
local_bh_disable();
rctx->complete(&req->base, err);
local_bh_enable();
-
- if (err != -EINPROGRESS && refcnt && atomic_dec_and_test(&ctx->refcnt))
- crypto_free_aead(tfm);
}
static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
@@ -907,7 +858,6 @@ struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
u32 type, u32 mask)
{
char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
- struct cryptd_blkcipher_ctx *ctx;
struct crypto_tfm *tfm;
if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
@@ -925,9 +875,6 @@ struct cryptd_ablkcipher *cryptd_alloc_ablkcipher(const char *alg_name,
return ERR_PTR(-EINVAL);
}
- ctx = crypto_tfm_ctx(tfm);
- atomic_set(&ctx->refcnt, 1);
-
return __cryptd_ablkcipher_cast(__crypto_ablkcipher_cast(tfm));
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ablkcipher);
@@ -939,20 +886,9 @@ struct crypto_blkcipher *cryptd_ablkcipher_child(struct cryptd_ablkcipher *tfm)
}
EXPORT_SYMBOL_GPL(cryptd_ablkcipher_child);
-bool cryptd_ablkcipher_queued(struct cryptd_ablkcipher *tfm)
-{
- struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
-
- return atomic_read(&ctx->refcnt) - 1;
-}
-EXPORT_SYMBOL_GPL(cryptd_ablkcipher_queued);
-
void cryptd_free_ablkcipher(struct cryptd_ablkcipher *tfm)
{
- struct cryptd_blkcipher_ctx *ctx = crypto_ablkcipher_ctx(&tfm->base);
-
- if (atomic_dec_and_test(&ctx->refcnt))
- crypto_free_ablkcipher(&tfm->base);
+ crypto_free_ablkcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ablkcipher);
@@ -960,7 +896,6 @@ struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
u32 type, u32 mask)
{
char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
- struct cryptd_hash_ctx *ctx;
struct crypto_ahash *tfm;
if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
@@ -974,9 +909,6 @@ struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
return ERR_PTR(-EINVAL);
}
- ctx = crypto_ahash_ctx(tfm);
- atomic_set(&ctx->refcnt, 1);
-
return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
@@ -996,20 +928,9 @@ struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
}
EXPORT_SYMBOL_GPL(cryptd_shash_desc);
-bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
-{
- struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
-
- return atomic_read(&ctx->refcnt) - 1;
-}
-EXPORT_SYMBOL_GPL(cryptd_ahash_queued);
-
void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
- struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
-
- if (atomic_dec_and_test(&ctx->refcnt))
- crypto_free_ahash(&tfm->base);
+ crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
@@ -1017,7 +938,6 @@ struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
u32 type, u32 mask)
{
char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
- struct cryptd_aead_ctx *ctx;
struct crypto_aead *tfm;
if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
@@ -1030,10 +950,6 @@ struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
crypto_free_aead(tfm);
return ERR_PTR(-EINVAL);
}
-
- ctx = crypto_aead_ctx(tfm);
- atomic_set(&ctx->refcnt, 1);
-
return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
@@ -1046,20 +962,9 @@ struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
}
EXPORT_SYMBOL_GPL(cryptd_aead_child);
-bool cryptd_aead_queued(struct cryptd_aead *tfm)
-{
- struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
-
- return atomic_read(&ctx->refcnt) - 1;
-}
-EXPORT_SYMBOL_GPL(cryptd_aead_queued);
-
void cryptd_free_aead(struct cryptd_aead *tfm)
{
- struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
-
- if (atomic_dec_and_test(&ctx->refcnt))
- crypto_free_aead(&tfm->base);
+ crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);