Refreshed all patches.

Remove upstreamed:
- 302-0002-dmaengine-dw-implement-per-channel-protection-contro.patch

Fixes:
- CVE-2019-19332

Compile-tested on: cns3xxx
Runtime-tested on: cns3xxx

Signed-off-by: Koen Vandeputte <koen.vandeputte@ncentric.com>
From 98e87e3d933b8e504ea41b8857c038d2cd06cddc Mon Sep 17 00:00:00 2001
From: Christian Lamparter <chunkeey@gmail.com>
Date: Thu, 19 Apr 2018 18:41:54 +0200
Subject: [PATCH 5/8] crypto: crypto4xx - add aes-ctr support

This patch adds support for the aes-ctr skcipher.

name         : ctr(aes)
driver       : ctr-aes-ppc4xx
module       : crypto4xx
priority     : 300
refcnt       : 1
selftest     : passed
internal     : no
type         : skcipher
async        : yes
blocksize    : 16
min keysize  : 16
max keysize  : 32
ivsize       : 16
chunksize    : 16
walksize     : 16

The hardware uses only the last 32 bits as the counter while the
kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
the whole IV is a counter. To make this work, the driver will fall
back if the counter is going to overflow.
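
The fallback trigger added below reduces to an unsigned wraparound
test on the IV's trailing 32-bit word. A minimal standalone sketch of
that check (the ctr_would_wrap() helper is hypothetical, for
illustration only, and not part of this patch):

#include <stdint.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

/* Sketch of the driver's wraparound test: 'counter' is the value of
 * the big-endian last 32-bit word of the IV, 'cryptlen' the request
 * length in bytes. Hypothetical helper, not part of the driver. */
static int ctr_would_wrap(uint32_t counter, size_t cryptlen)
{
	uint32_t nblks = (cryptlen + AES_BLOCK_SIZE - 1) / AES_BLOCK_SIZE;

	/* Unsigned addition wraps modulo 2^32, so a sum smaller than
	 * an operand means the counter would overflow mid-request. */
	return counter + nblks < counter;
}

int main(void)
{
	printf("%d\n", ctr_would_wrap(0xfffffffeu, 4 * AES_BLOCK_SIZE)); /* 1: wraps */
	printf("%d\n", ctr_would_wrap(1, 4 * AES_BLOCK_SIZE));           /* 0: safe */
	return 0;
}
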
The aead's crypto4xx_setup_fallback() function is renamed to
crypto4xx_aead_setup_fallback.

Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
---
 drivers/crypto/amcc/crypto4xx_alg.c  | 91 ++++++++++++++++++++++++++--
 drivers/crypto/amcc/crypto4xx_core.c | 37 +++++++++++
 drivers/crypto/amcc/crypto4xx_core.h |  5 ++
 3 files changed, 127 insertions(+), 6 deletions(-)

--- a/drivers/crypto/amcc/crypto4xx_alg.c
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
@@ -241,6 +241,85 @@ int crypto4xx_rfc3686_decrypt(struct skc
 				  ctx->sa_out, ctx->sa_len, 0);
 }
 
+static int
+crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
+{
+	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
+	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
+	size_t iv_len = crypto_skcipher_ivsize(cipher);
+	unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
+	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
+			AES_BLOCK_SIZE;
+
+	/*
+	 * The hardware uses only the last 32-bits as the counter while the
+	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
+	 * the whole IV is a counter.  So fall back if the counter is going to
+	 * overflow.
+	 */
+	if (counter + nblks < counter) {
+		struct skcipher_request *subreq = skcipher_request_ctx(req);
+		int ret;
+
+		skcipher_request_set_tfm(subreq, ctx->sw_cipher.cipher);
+		skcipher_request_set_callback(subreq, req->base.flags,
+			NULL, NULL);
+		skcipher_request_set_crypt(subreq, req->src, req->dst,
+			req->cryptlen, req->iv);
+		ret = encrypt ? crypto_skcipher_encrypt(subreq)
+			: crypto_skcipher_decrypt(subreq);
+		skcipher_request_zero(subreq);
+		return ret;
+	}
+
+	return encrypt ? crypto4xx_encrypt_iv(req)
+		       : crypto4xx_decrypt_iv(req);
+}
+
+static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
+				       struct crypto_skcipher *cipher,
+				       const u8 *key,
+				       unsigned int keylen)
+{
+	int rc;
+
+	crypto_skcipher_clear_flags(ctx->sw_cipher.cipher,
+				    CRYPTO_TFM_REQ_MASK);
+	crypto_skcipher_set_flags(ctx->sw_cipher.cipher,
+		crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
+	rc = crypto_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
+	crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
+	crypto_skcipher_set_flags(cipher,
+		crypto_skcipher_get_flags(ctx->sw_cipher.cipher) &
+			CRYPTO_TFM_RES_MASK);
+
+	return rc;
+}
+
+int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
+			     const u8 *key, unsigned int keylen)
+{
+	struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
+	int rc;
+
+	rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
+	if (rc)
+		return rc;
+
+	return crypto4xx_setkey_aes(cipher, key, keylen,
+		CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
+}
+
+int crypto4xx_encrypt_ctr(struct skcipher_request *req)
+{
+	return crypto4xx_ctr_crypt(req, true);
+}
+
+int crypto4xx_decrypt_ctr(struct skcipher_request *req)
+{
+	return crypto4xx_ctr_crypt(req, false);
+}
+
 static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
 						bool is_ccm, bool decrypt)
 {
@@ -283,10 +362,10 @@ static int crypto4xx_aead_fallback(struc
 			    crypto_aead_encrypt(subreq);
 }
 
-static int crypto4xx_setup_fallback(struct crypto4xx_ctx *ctx,
-				    struct crypto_aead *cipher,
-				    const u8 *key,
-				    unsigned int keylen)
+static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
+					 struct crypto_aead *cipher,
+					 const u8 *key,
+					 unsigned int keylen)
 {
 	int rc;
 
@@ -314,7 +393,7 @@ int crypto4xx_setkey_aes_ccm(struct cryp
 	struct dynamic_sa_ctl *sa;
 	int rc = 0;
 
-	rc = crypto4xx_setup_fallback(ctx, cipher, key, keylen);
+	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
 	if (rc)
 		return rc;
 
@@ -473,7 +552,7 @@ int crypto4xx_setkey_aes_gcm(struct cryp
 		return -EINVAL;
 	}
 
-	rc = crypto4xx_setup_fallback(ctx, cipher, key, keylen);
+	rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
 	if (rc)
 		return rc;
 
--- a/drivers/crypto/amcc/crypto4xx_core.c
+++ b/drivers/crypto/amcc/crypto4xx_core.c
@@ -946,6 +946,19 @@ static int crypto4xx_sk_init(struct cryp
 	struct crypto4xx_alg *amcc_alg;
 	struct crypto4xx_ctx *ctx =  crypto_skcipher_ctx(sk);
 
+	if (alg->base.cra_flags & CRYPTO_ALG_NEED_FALLBACK) {
+		ctx->sw_cipher.cipher =
+			crypto_alloc_skcipher(alg->base.cra_name, 0,
+					      CRYPTO_ALG_NEED_FALLBACK |
+					      CRYPTO_ALG_ASYNC);
+		if (IS_ERR(ctx->sw_cipher.cipher))
+			return PTR_ERR(ctx->sw_cipher.cipher);
+
+		crypto_skcipher_set_reqsize(sk,
+			sizeof(struct skcipher_request) + 32 +
+			crypto_skcipher_reqsize(ctx->sw_cipher.cipher));
+	}
+
 	amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.cipher);
 	crypto4xx_ctx_init(amcc_alg, ctx);
 	return 0;
@@ -961,6 +974,8 @@ static void crypto4xx_sk_exit(struct cry
 	struct crypto4xx_ctx *ctx =  crypto_skcipher_ctx(sk);
 
 	crypto4xx_common_exit(ctx);
+	if (ctx->sw_cipher.cipher)
+		crypto_free_skcipher(ctx->sw_cipher.cipher);
 }
 
 static int crypto4xx_aead_init(struct crypto_aead *tfm)
@@ -1150,6 +1165,28 @@ static struct crypto4xx_alg_common crypt
 		.init = crypto4xx_sk_init,
 		.exit = crypto4xx_sk_exit,
 	} },
+	{ .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
+		.base = {
+			.cra_name = "ctr(aes)",
+			.cra_driver_name = "ctr-aes-ppc4xx",
+			.cra_priority = CRYPTO4XX_CRYPTO_PRIORITY,
+			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
+				CRYPTO_ALG_NEED_FALLBACK |
+				CRYPTO_ALG_ASYNC |
+				CRYPTO_ALG_KERN_DRIVER_ONLY,
+			.cra_blocksize = AES_BLOCK_SIZE,
+			.cra_ctxsize = sizeof(struct crypto4xx_ctx),
+			.cra_module = THIS_MODULE,
+		},
+		.min_keysize = AES_MIN_KEY_SIZE,
+		.max_keysize = AES_MAX_KEY_SIZE,
+		.ivsize	= AES_IV_SIZE,
+		.setkey	= crypto4xx_setkey_aes_ctr,
+		.encrypt = crypto4xx_encrypt_ctr,
+		.decrypt = crypto4xx_decrypt_ctr,
+		.init = crypto4xx_sk_init,
+		.exit = crypto4xx_sk_exit,
+	} },
 	{ .type = CRYPTO_ALG_TYPE_SKCIPHER, .u.cipher = {
 		.base = {
 			.cra_name = "rfc3686(ctr(aes))",
--- a/drivers/crypto/amcc/crypto4xx_core.h
+++ b/drivers/crypto/amcc/crypto4xx_core.h
@@ -128,6 +128,7 @@ struct crypto4xx_ctx {
 	__le32 iv_nonce;
 	u32 sa_len;
 	union {
+		struct crypto_skcipher *cipher;
 		struct crypto_aead *aead;
 	} sw_cipher;
 };
@@ -163,12 +164,16 @@ int crypto4xx_setkey_aes_cbc(struct cryp
 			     const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
 			     const u8 *key, unsigned int keylen);
+int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
+			     const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
 			     const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
 			     const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
 			     const u8 *key, unsigned int keylen);
+int crypto4xx_encrypt_ctr(struct skcipher_request *req);
+int crypto4xx_decrypt_ctr(struct skcipher_request *req);
 int crypto4xx_encrypt_iv(struct skcipher_request *req);
 int crypto4xx_decrypt_iv(struct skcipher_request *req);
 int crypto4xx_encrypt_noiv(struct skcipher_request *req);