crypto: ghash-ce - Fix cryptd reordering
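
This change affects how the async ghash helpers decide between running
inline and deferring to cryptd. Previously, ghash_async_update(),
ghash_async_final() and ghash_async_digest() fell back to cryptd only
when may_use_simd() returned false; once some requests had been queued
to cryptd that way, a later request arriving while NEON was usable was
handled inline and could complete ahead of the queued ones, reordering
the request stream. The hunks below make those three paths also defer
to cryptd whenever the tfm already has requests queued there, while
ghash_async_init() stops branching altogether, since the init step does
not need NEON.

A minimal sketch of the deferral test the patch applies in the three
data-processing paths (ghash_should_defer() is a hypothetical helper
name used here only for illustration; it is not part of the patch):

/*
 * Defer to cryptd either when SIMD/NEON must not be used in the
 * current context, or when this tfm already has requests sitting in
 * the cryptd queue, so that a new request cannot overtake them.
 */
static bool ghash_should_defer(struct cryptd_ahash *cryptd_tfm)
{
	return !may_use_simd() ||
	       (in_atomic() && cryptd_ahash_queued(cryptd_tfm));
}
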
diff --git a/arch/arm/crypto/ghash-ce-glue.c b/arch/arm/crypto/ghash-ce-glue.c
index 03a39fe2924648f91c45224c84d1a883e433a6dc..1568cb5cd870507dfb5c9799b7fdf090083b9b03 100644
--- a/arch/arm/crypto/ghash-ce-glue.c
+++ b/arch/arm/crypto/ghash-ce-glue.c
@@ -154,30 +154,23 @@ static int ghash_async_init(struct ahash_request *req)
        struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
+       struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
+       struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);
 
-       if (!may_use_simd()) {
-               memcpy(cryptd_req, req, sizeof(*req));
-               ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
-               return crypto_ahash_init(cryptd_req);
-       } else {
-               struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
-               struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);
-
-               desc->tfm = child;
-               desc->flags = req->base.flags;
-               return crypto_shash_init(desc);
-       }
+       desc->tfm = child;
+       desc->flags = req->base.flags;
+       return crypto_shash_init(desc);
 }
 
 static int ghash_async_update(struct ahash_request *req)
 {
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
+       struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+       struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
+       struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
 
-       if (!may_use_simd()) {
-               struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-               struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
-               struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
-
+       if (!may_use_simd() ||
+           (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
                memcpy(cryptd_req, req, sizeof(*req));
                ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
                return crypto_ahash_update(cryptd_req);
@@ -190,12 +183,12 @@ static int ghash_async_update(struct ahash_request *req)
 static int ghash_async_final(struct ahash_request *req)
 {
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
+       struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
+       struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
+       struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
 
-       if (!may_use_simd()) {
-               struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
-               struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
-               struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
-
+       if (!may_use_simd() ||
+           (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
                memcpy(cryptd_req, req, sizeof(*req));
                ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
                return crypto_ahash_final(cryptd_req);
@@ -212,7 +205,8 @@ static int ghash_async_digest(struct ahash_request *req)
        struct ahash_request *cryptd_req = ahash_request_ctx(req);
        struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
 
-       if (!may_use_simd()) {
+       if (!may_use_simd() ||
+           (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
                memcpy(cryptd_req, req, sizeof(*req));
                ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
                return crypto_ahash_digest(cryptd_req);
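
For reference, the inline branches of these functions (unchanged
context falling outside the hunks above) run the operation through the
child shash rather than cryptd, along the same lines as the init path
in the first hunk. Roughly, for ghash_async_update() (a sketch, not the
exact file contents):

	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

		return shash_ahash_update(req, desc);
	}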