Diffstat (limited to 'arch/s390/crypto')
-rw-r--r--  arch/s390/crypto/Kconfig        |  3
-rw-r--r--  arch/s390/crypto/aes_s390.c     | 45
-rw-r--r--  arch/s390/crypto/chacha-glue.c  |  8
3 files changed, 20 insertions(+), 36 deletions(-)
diff --git a/arch/s390/crypto/Kconfig b/arch/s390/crypto/Kconfig
index b760232537f1..8c4db8b64fa2 100644
--- a/arch/s390/crypto/Kconfig
+++ b/arch/s390/crypto/Kconfig
@@ -108,11 +108,12 @@ config CRYPTO_DES_S390
 	  As of z196 the CTR mode is hardware accelerated.
 
 config CRYPTO_CHACHA_S390
-	tristate "Ciphers: ChaCha20"
+	tristate
 	depends on S390
 	select CRYPTO_SKCIPHER
 	select CRYPTO_LIB_CHACHA_GENERIC
 	select CRYPTO_ARCH_HAVE_LIB_CHACHA
+	default CRYPTO_LIB_CHACHA_INTERNAL
 	help
 	  Length-preserving cipher: ChaCha20 stream cipher (RFC 7539)
 
diff --git a/arch/s390/crypto/aes_s390.c b/arch/s390/crypto/aes_s390.c
index 9c46b1b630b1..5d36f4020dfa 100644
--- a/arch/s390/crypto/aes_s390.c
+++ b/arch/s390/crypto/aes_s390.c
@@ -66,7 +66,6 @@ struct s390_xts_ctx {
 struct gcm_sg_walk {
 	struct scatter_walk walk;
 	unsigned int walk_bytes;
-	u8 *walk_ptr;
 	unsigned int walk_bytes_remain;
 	u8 buf[AES_BLOCK_SIZE];
 	unsigned int buf_bytes;
@@ -787,29 +786,20 @@ static void gcm_walk_start(struct gcm_sg_walk *gw, struct scatterlist *sg,
 
 static inline unsigned int _gcm_sg_clamp_and_map(struct gcm_sg_walk *gw)
 {
-	struct scatterlist *nextsg;
-
-	gw->walk_bytes = scatterwalk_clamp(&gw->walk, gw->walk_bytes_remain);
-	while (!gw->walk_bytes) {
-		nextsg = sg_next(gw->walk.sg);
-		if (!nextsg)
-			return 0;
-		scatterwalk_start(&gw->walk, nextsg);
-		gw->walk_bytes = scatterwalk_clamp(&gw->walk,
-						   gw->walk_bytes_remain);
-	}
-	gw->walk_ptr = scatterwalk_map(&gw->walk);
+	if (gw->walk_bytes_remain == 0)
+		return 0;
+	gw->walk_bytes = scatterwalk_next(&gw->walk, gw->walk_bytes_remain);
 	return gw->walk_bytes;
 }
 
 static inline void _gcm_sg_unmap_and_advance(struct gcm_sg_walk *gw,
-					     unsigned int nbytes)
+					     unsigned int nbytes, bool out)
 {
 	gw->walk_bytes_remain -= nbytes;
-	scatterwalk_unmap(gw->walk_ptr);
-	scatterwalk_advance(&gw->walk, nbytes);
-	scatterwalk_done(&gw->walk, 0, gw->walk_bytes_remain);
-	gw->walk_ptr = NULL;
+	if (out)
+		scatterwalk_done_dst(&gw->walk, nbytes);
+	else
+		scatterwalk_done_src(&gw->walk, nbytes);
 }
 
 static int gcm_in_walk_go(struct gcm_sg_walk *gw, unsigned int minbytesneeded)
@@ -835,16 +825,16 @@ static int gcm_in_walk_go(struct gcm_sg_walk *gw, unsigned int minbytesneeded)
 	}
 
 	if (!gw->buf_bytes && gw->walk_bytes >= minbytesneeded) {
-		gw->ptr = gw->walk_ptr;
+		gw->ptr = gw->walk.addr;
 		gw->nbytes = gw->walk_bytes;
 		goto out;
 	}
 
 	while (1) {
 		n = min(gw->walk_bytes, AES_BLOCK_SIZE - gw->buf_bytes);
-		memcpy(gw->buf + gw->buf_bytes, gw->walk_ptr, n);
+		memcpy(gw->buf + gw->buf_bytes, gw->walk.addr, n);
 		gw->buf_bytes += n;
-		_gcm_sg_unmap_and_advance(gw, n);
+		_gcm_sg_unmap_and_advance(gw, n, false);
 		if (gw->buf_bytes >= minbytesneeded) {
 			gw->ptr = gw->buf;
 			gw->nbytes = gw->buf_bytes;
@@ -876,13 +866,12 @@ static int gcm_out_walk_go(struct gcm_sg_walk *gw, unsigned int minbytesneeded)
 	}
 
 	if (gw->walk_bytes >= minbytesneeded) {
-		gw->ptr = gw->walk_ptr;
+		gw->ptr = gw->walk.addr;
 		gw->nbytes = gw->walk_bytes;
 		goto out;
 	}
 
-	scatterwalk_unmap(gw->walk_ptr);
-	gw->walk_ptr = NULL;
+	scatterwalk_unmap(&gw->walk);
 
 	gw->ptr = gw->buf;
 	gw->nbytes = sizeof(gw->buf);
@@ -904,7 +893,7 @@ static int gcm_in_walk_done(struct gcm_sg_walk *gw, unsigned int bytesdone)
 		} else
 			gw->buf_bytes = 0;
 	} else
-		_gcm_sg_unmap_and_advance(gw, bytesdone);
+		_gcm_sg_unmap_and_advance(gw, bytesdone, false);
 
 	return bytesdone;
 }
@@ -921,11 +910,11 @@ static int gcm_out_walk_done(struct gcm_sg_walk *gw, unsigned int bytesdone)
 			if (!_gcm_sg_clamp_and_map(gw))
 				return i;
 			n = min(gw->walk_bytes, bytesdone - i);
-			memcpy(gw->walk_ptr, gw->buf + i, n);
-			_gcm_sg_unmap_and_advance(gw, n);
+			memcpy(gw->walk.addr, gw->buf + i, n);
+			_gcm_sg_unmap_and_advance(gw, n, true);
 		}
 	} else
-		_gcm_sg_unmap_and_advance(gw, bytesdone, true);
+		_gcm_sg_unmap_and_advance(gw, bytesdone, true);
 
 	return bytesdone;
 }
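
Note on the API used above: the old scatterwalk_map()/scatterwalk_advance()/scatterwalk_done() triple is replaced by scatterwalk_next(), which maps the next contiguous chunk and exposes it via walk.addr, paired with scatterwalk_done_src() or scatterwalk_done_dst(), which unmap and advance the walk (the dst variant additionally takes care of flushing written data). A minimal sketch of the read-side pattern, independent of this driver; sg_copy_out_sketch is a hypothetical helper, not part of the patch:

#include <crypto/scatterwalk.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

/* Copy 'total' bytes out of a scatterlist using the reworked walk API. */
static void sg_copy_out_sketch(struct scatterlist *sg, u8 *dst,
			       unsigned int total)
{
	struct scatter_walk walk;
	unsigned int nbytes;

	scatterwalk_start(&walk, sg);
	while (total) {
		/* Map the next contiguous chunk; at most 'total' bytes. */
		nbytes = scatterwalk_next(&walk, total);
		memcpy(dst, walk.addr, nbytes);
		dst += nbytes;
		total -= nbytes;
		/* Unmap and advance past the consumed source bytes. */
		scatterwalk_done_src(&walk, nbytes);
	}
}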
diff --git a/arch/s390/crypto/chacha-glue.c b/arch/s390/crypto/chacha-glue.c
index f8b0c52e77a4..920e9f0941e7 100644
--- a/arch/s390/crypto/chacha-glue.c
+++ b/arch/s390/crypto/chacha-glue.c
@@ -41,7 +41,7 @@ static int chacha20_s390(struct skcipher_request *req)
 	int rc;
 
 	rc = skcipher_walk_virt(&walk, req, false);
-	chacha_init_generic(state, ctx->key, req->iv);
+	chacha_init(state, ctx->key, req->iv);
 
 	while (walk.nbytes > 0) {
 		nbytes = walk.nbytes;
@@ -69,12 +69,6 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
 }
 EXPORT_SYMBOL(hchacha_block_arch);
 
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
-	chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
 void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
 		       unsigned int bytes, int nrounds)
 {
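
For reference, this rename tracks the ChaCha library interface: chacha_init_generic() became chacha_init(), so the s390 chacha_init_arch() wrapper that merely forwarded to it is no longer needed. A hedged sketch of a library-API caller after the change; chacha20_buf_sketch and its parameters are illustrative, not part of the patch:

#include <crypto/chacha.h>

/* Illustrative helper: one-shot ChaCha20 encryption via the lib API. */
static void chacha20_buf_sketch(const u32 key[CHACHA_KEY_SIZE / sizeof(u32)],
				const u8 iv[CHACHA_IV_SIZE],
				u8 *dst, const u8 *src, unsigned int len)
{
	u32 state[CHACHA_STATE_WORDS];

	/* Build the 16-word state: constants, 256-bit key, counter + nonce. */
	chacha_init(state, key, iv);
	/* On s390 this takes the vector (VX) code path when it is usable. */
	chacha_crypt_arch(state, dst, src, len, 20);
}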