Diffstat (limited to 'module/icp/io'):
 module/icp/io/aes.c       | 23
 module/icp/io/sha2_mod.c  | 50
 module/icp/io/skein_mod.c | 26
 3 files changed, 48 insertions(+), 51 deletions(-)
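
The patch mechanically replaces the legacy SunOS/BSD byte routines with their ISO C counterparts. The only mapping that is not a pure rename is bcopy(), whose source/destination argument order is reversed relative to memcpy(). A standalone sketch of the correspondence (not part of the patch; names are illustrative):

	#include <string.h>

	static void
	legacy_to_iso(void *dst, const void *src, size_t n)
	{
		/* bzero(dst, n)      ->  memset(dst, 0, n) */
		memset(dst, 0, n);
		/* bcopy(src, dst, n) ->  memcpy(dst, src, n) -- arguments swap */
		memcpy(dst, src, n);
		/* bcmp(a, b, n)      ->  memcmp(a, b, n)     -- 0 means equal */
		(void) memcmp(dst, src, n);
	}
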
diff --git a/module/icp/io/aes.c b/module/icp/io/aes.c
index b0f51262d..945d560eb 100644
--- a/module/icp/io/aes.c
+++ b/module/icp/io/aes.c
@@ -832,7 +832,7 @@ aes_encrypt_atomic(crypto_mechanism_t *mechanism,
crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,
crypto_spi_ctx_template_t template)
{
- aes_ctx_t aes_ctx; /* on the stack */
+ aes_ctx_t aes_ctx = {{{{0}}}};
off_t saved_offset;
size_t saved_length;
size_t length_needed;
@@ -858,8 +858,6 @@ aes_encrypt_atomic(crypto_mechanism_t *mechanism,
if ((ret = aes_check_mech_param(mechanism, NULL)) != CRYPTO_SUCCESS)
return (ret);
- bzero(&aes_ctx, sizeof (aes_ctx_t));
-
ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
KM_SLEEP, B_TRUE);
if (ret != CRYPTO_SUCCESS)
@@ -944,7 +942,7 @@ aes_encrypt_atomic(crypto_mechanism_t *mechanism,
out:
if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
- bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
+ memset(aes_ctx.ac_keysched, 0, aes_ctx.ac_keysched_len);
kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
}
#ifdef CAN_USE_GCM_ASM
@@ -953,7 +951,7 @@ out:
gcm_ctx_t *ctx = (gcm_ctx_t *)&aes_ctx;
- bzero(ctx->gcm_Htable, ctx->gcm_htab_len);
+ memset(ctx->gcm_Htable, 0, ctx->gcm_htab_len);
kmem_free(ctx->gcm_Htable, ctx->gcm_htab_len);
}
#endif
@@ -966,7 +964,7 @@ aes_decrypt_atomic(crypto_mechanism_t *mechanism,
crypto_key_t *key, crypto_data_t *ciphertext, crypto_data_t *plaintext,
crypto_spi_ctx_template_t template)
{
- aes_ctx_t aes_ctx; /* on the stack */
+ aes_ctx_t aes_ctx = {{{{0}}}};
off_t saved_offset;
size_t saved_length;
size_t length_needed;
@@ -992,8 +990,6 @@ aes_decrypt_atomic(crypto_mechanism_t *mechanism,
if ((ret = aes_check_mech_param(mechanism, NULL)) != CRYPTO_SUCCESS)
return (ret);
- bzero(&aes_ctx, sizeof (aes_ctx_t));
-
ret = aes_common_init_ctx(&aes_ctx, template, mechanism, key,
KM_SLEEP, B_FALSE);
if (ret != CRYPTO_SUCCESS)
@@ -1096,7 +1092,7 @@ aes_decrypt_atomic(crypto_mechanism_t *mechanism,
out:
if (aes_ctx.ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
- bzero(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
+ memset(aes_ctx.ac_keysched, 0, aes_ctx.ac_keysched_len);
kmem_free(aes_ctx.ac_keysched, aes_ctx.ac_keysched_len);
}
@@ -1113,7 +1109,7 @@ out:
if (((gcm_ctx_t *)&aes_ctx)->gcm_Htable != NULL) {
gcm_ctx_t *ctx = (gcm_ctx_t *)&aes_ctx;
- bzero(ctx->gcm_Htable, ctx->gcm_htab_len);
+ memset(ctx->gcm_Htable, 0, ctx->gcm_htab_len);
kmem_free(ctx->gcm_Htable, ctx->gcm_htab_len);
}
#endif
@@ -1150,7 +1146,7 @@ aes_create_ctx_template(crypto_mechanism_t *mechanism, crypto_key_t *key,
* in the key.
*/
if ((rv = init_keysched(key, keysched)) != CRYPTO_SUCCESS) {
- bzero(keysched, size);
+ memset(keysched, 0, size);
kmem_free(keysched, size);
return (rv);
}
@@ -1170,7 +1166,8 @@ aes_free_context(crypto_ctx_t *ctx)
if (aes_ctx != NULL) {
if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
ASSERT(aes_ctx->ac_keysched_len != 0);
- bzero(aes_ctx->ac_keysched, aes_ctx->ac_keysched_len);
+ memset(aes_ctx->ac_keysched, 0,
+ aes_ctx->ac_keysched_len);
kmem_free(aes_ctx->ac_keysched,
aes_ctx->ac_keysched_len);
}
@@ -1260,7 +1257,7 @@ aes_common_init_ctx(aes_ctx_t *aes_ctx, crypto_spi_ctx_template_t *template,
if (rv != CRYPTO_SUCCESS) {
if (aes_ctx->ac_flags & PROVIDER_OWNS_KEY_SCHEDULE) {
- bzero(keysched, size);
+ memset(keysched, 0, size);
kmem_free(keysched, size);
}
}
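
In the aes.c hunks above, the stack context is now zeroed by its initializer instead of a runtime call, which is why the bzero(&aes_ctx, sizeof (aes_ctx_t)) lines can be dropped. The quadruple braces track the nesting of the first member of aes_ctx_t; a hedged illustration with a made-up type of the same shape:

	/* Illustrative only: a first member nested three levels deep is
	 * what makes {{{{0}}}} the fully braced zero initializer (and
	 * keeps -Wmissing-braces quiet). C zero-fills all remaining
	 * members of a partially initialized aggregate. */
	struct fake_inner { int word[4]; };
	union fake_u      { struct fake_inner in; };
	struct fake_ctx   { union fake_u u; int flags; };

	struct fake_ctx c = {{{{0}}}};	/* c.u and c.flags are all zero */
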
diff --git a/module/icp/io/sha2_mod.c b/module/icp/io/sha2_mod.c
index c586c3272..4a218b500 100644
--- a/module/icp/io/sha2_mod.c
+++ b/module/icp/io/sha2_mod.c
@@ -46,7 +46,7 @@
(len) = (uint32_t)*((ulong_t *)(m)->cm_param); \
else { \
ulong_t tmp_ulong; \
- bcopy((m)->cm_param, &tmp_ulong, sizeof (ulong_t)); \
+ memcpy(&tmp_ulong, (m)->cm_param, sizeof (ulong_t)); \
(len) = (uint32_t)tmp_ulong; \
} \
}
@@ -309,9 +309,9 @@ sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
*/
SHA2Final(digest_scratch, sha2_ctx);
- bcopy(digest_scratch, (uchar_t *)
+ memcpy((uchar_t *)
zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
- digest_len);
+ digest_scratch, digest_len);
} else {
SHA2Final((uchar_t *)zfs_uio_iovbase(digest->
cd_uio, vec_idx) + offset,
@@ -336,8 +336,9 @@ sha2_digest_final_uio(SHA2_CTX *sha2_ctx, crypto_data_t *digest,
cur_len =
MIN(zfs_uio_iovlen(digest->cd_uio, vec_idx) -
offset, length);
- bcopy(digest_tmp + scratch_offset,
+ memcpy(
zfs_uio_iovbase(digest->cd_uio, vec_idx) + offset,
+ digest_tmp + scratch_offset,
cur_len);
length -= cur_len;
@@ -630,8 +631,8 @@ sha2_digest_atomic(crypto_mechanism_t *mechanism, crypto_data_t *data,
static void
sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
{
- uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
- uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)];
+ uint64_t ipad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)] = {0};
+ uint64_t opad[SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t)] = {0};
int i, block_size, blocks_per_int64;
/* Determine the block size */
@@ -643,12 +644,12 @@ sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
blocks_per_int64 = SHA512_HMAC_BLOCK_SIZE / sizeof (uint64_t);
}
- (void) bzero(ipad, block_size);
- (void) bzero(opad, block_size);
+ (void) memset(ipad, 0, block_size);
+ (void) memset(opad, 0, block_size);
if (keyval != NULL) {
- (void) bcopy(keyval, ipad, length_in_bytes);
- (void) bcopy(keyval, opad, length_in_bytes);
+ (void) memcpy(ipad, keyval, length_in_bytes);
+ (void) memcpy(opad, keyval, length_in_bytes);
} else {
ASSERT0(length_in_bytes);
}
@@ -666,7 +667,6 @@ sha2_mac_init_ctx(sha2_hmac_ctx_t *ctx, void *keyval, uint_t length_in_bytes)
/* perform SHA2 on opad */
SHA2Init(ctx->hc_mech_type, &ctx->hc_ocontext);
SHA2Update(&ctx->hc_ocontext, (uint8_t *)opad, block_size);
-
}
/*
@@ -708,7 +708,7 @@ sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
PROV_SHA2_HMAC_CTX(ctx)->hc_mech_type = mechanism->cm_type;
if (ctx_template != NULL) {
/* reuse context template */
- bcopy(ctx_template, PROV_SHA2_HMAC_CTX(ctx),
+ memcpy(PROV_SHA2_HMAC_CTX(ctx), ctx_template,
sizeof (sha2_hmac_ctx_t));
} else {
/* no context template, compute context */
@@ -746,7 +746,7 @@ sha2_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
}
if (ret != CRYPTO_SUCCESS) {
- bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
+ memset(ctx->cc_provider_private, 0, sizeof (sha2_hmac_ctx_t));
kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
ctx->cc_provider_private = NULL;
}
@@ -850,8 +850,8 @@ sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac)
*/
SHA2Final(digest,
&PROV_SHA2_HMAC_CTX(ctx)->hc_ocontext);
- bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
- mac->cd_offset, digest_len);
+ memcpy((unsigned char *)mac->cd_raw.iov_base +
+ mac->cd_offset, digest, digest_len);
} else {
SHA2Final((unsigned char *)mac->cd_raw.iov_base +
mac->cd_offset,
@@ -872,7 +872,7 @@ sha2_mac_final(crypto_ctx_t *ctx, crypto_data_t *mac)
else
mac->cd_length = 0;
- bzero(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
+ memset(ctx->cc_provider_private, 0, sizeof (sha2_hmac_ctx_t));
kmem_free(ctx->cc_provider_private, sizeof (sha2_hmac_ctx_t));
ctx->cc_provider_private = NULL;
@@ -928,7 +928,7 @@ sha2_mac_atomic(crypto_mechanism_t *mechanism,
if (ctx_template != NULL) {
/* reuse context template */
- bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
+ memcpy(&sha2_hmac_ctx, ctx_template, sizeof (sha2_hmac_ctx_t));
} else {
sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
/* no context template, initialize context */
@@ -1001,8 +1001,8 @@ sha2_mac_atomic(crypto_mechanism_t *mechanism,
* the user only what was requested.
*/
SHA2Final(digest, &sha2_hmac_ctx.hc_ocontext);
- bcopy(digest, (unsigned char *)mac->cd_raw.iov_base +
- mac->cd_offset, digest_len);
+ memcpy((unsigned char *)mac->cd_raw.iov_base +
+ mac->cd_offset, digest, digest_len);
} else {
SHA2Final((unsigned char *)mac->cd_raw.iov_base +
mac->cd_offset, &sha2_hmac_ctx.hc_ocontext);
@@ -1021,7 +1021,7 @@ sha2_mac_atomic(crypto_mechanism_t *mechanism,
return (CRYPTO_SUCCESS);
}
bail:
- bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
+ memset(&sha2_hmac_ctx, 0, sizeof (sha2_hmac_ctx_t));
mac->cd_length = 0;
return (ret);
}
@@ -1060,7 +1060,7 @@ sha2_mac_verify_atomic(crypto_mechanism_t *mechanism,
if (ctx_template != NULL) {
/* reuse context template */
- bcopy(ctx_template, &sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
+ memcpy(&sha2_hmac_ctx, ctx_template, sizeof (sha2_hmac_ctx_t));
} else {
sha2_hmac_ctx.hc_mech_type = mechanism->cm_type;
/* no context template, initialize context */
@@ -1137,7 +1137,7 @@ sha2_mac_verify_atomic(crypto_mechanism_t *mechanism,
switch (mac->cd_format) {
case CRYPTO_DATA_RAW:
- if (bcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
+ if (memcmp(digest, (unsigned char *)mac->cd_raw.iov_base +
mac->cd_offset, digest_len) != 0)
ret = CRYPTO_INVALID_MAC;
break;
@@ -1170,7 +1170,7 @@ sha2_mac_verify_atomic(crypto_mechanism_t *mechanism,
cur_len = MIN(zfs_uio_iovlen(mac->cd_uio, vec_idx) -
offset, length);
- if (bcmp(digest + scratch_offset,
+ if (memcmp(digest + scratch_offset,
zfs_uio_iovbase(mac->cd_uio, vec_idx) + offset,
cur_len) != 0) {
ret = CRYPTO_INVALID_MAC;
@@ -1191,7 +1191,7 @@ sha2_mac_verify_atomic(crypto_mechanism_t *mechanism,
return (ret);
bail:
- bzero(&sha2_hmac_ctx, sizeof (sha2_hmac_ctx_t));
+ memset(&sha2_hmac_ctx, 0, sizeof (sha2_hmac_ctx_t));
mac->cd_length = 0;
return (ret);
}
@@ -1282,7 +1282,7 @@ sha2_free_context(crypto_ctx_t *ctx)
else
ctx_len = sizeof (sha2_hmac_ctx_t);
- bzero(ctx->cc_provider_private, ctx_len);
+ memset(ctx->cc_provider_private, 0, ctx_len);
kmem_free(ctx->cc_provider_private, ctx_len);
ctx->cc_provider_private = NULL;
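
The sha2_mod.c hunks likewise give ipad/opad explicit = {0} initializers ahead of the memset()/memcpy() pad setup; the surrounding logic is the standard RFC 2104 HMAC construction. A minimal sketch of that pattern under the same argument-order conventions (function name invented, block size fixed at SHA-512's 128 bytes):

	#include <stdint.h>
	#include <string.h>

	#define HMAC_BLOCK	128	/* SHA-512 HMAC block size in bytes */

	static void
	hmac_pads(uint8_t *ipad, uint8_t *opad, const void *key, size_t klen)
	{
		memset(ipad, 0, HMAC_BLOCK);	/* was bzero(ipad, ...) */
		memset(opad, 0, HMAC_BLOCK);
		if (key != NULL) {
			memcpy(ipad, key, klen);	/* was bcopy(key, ipad, ...) */
			memcpy(opad, key, klen);
		}
		for (size_t i = 0; i < HMAC_BLOCK; i++) {
			ipad[i] ^= 0x36;	/* RFC 2104 inner pad */
			opad[i] ^= 0x5c;	/* RFC 2104 outer pad */
		}
	}
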
diff --git a/module/icp/io/skein_mod.c b/module/icp/io/skein_mod.c
index 1d6969e68..a2ed6cedd 100644
--- a/module/icp/io/skein_mod.c
+++ b/module/icp/io/skein_mod.c
@@ -292,8 +292,8 @@ skein_digest_final_uio(skein_ctx_t *ctx, crypto_data_t *digest)
while (vec_idx < zfs_uio_iovcnt(uio) && length > 0) {
cur_len = MIN(zfs_uio_iovlen(uio, vec_idx) - offset,
length);
- bcopy(digest_tmp + scratch_offset,
- zfs_uio_iovbase(uio, vec_idx) + offset, cur_len);
+ memcpy(zfs_uio_iovbase(uio, vec_idx) + offset,
+ digest_tmp + scratch_offset, cur_len);
length -= cur_len;
vec_idx++;
@@ -349,7 +349,7 @@ skein_digest_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism)
return (CRYPTO_SUCCESS);
errout:
- bzero(SKEIN_CTX(ctx), sizeof (*SKEIN_CTX(ctx)));
+ memset(SKEIN_CTX(ctx), 0, sizeof (*SKEIN_CTX(ctx)));
kmem_free(SKEIN_CTX(ctx), sizeof (*SKEIN_CTX(ctx)));
SKEIN_CTX_LVALUE(ctx) = NULL;
return (error);
@@ -376,7 +376,7 @@ skein_digest(crypto_ctx_t *ctx, crypto_data_t *data, crypto_data_t *digest)
error = skein_update(ctx, data);
if (error != CRYPTO_SUCCESS) {
- bzero(SKEIN_CTX(ctx), sizeof (*SKEIN_CTX(ctx)));
+ memset(SKEIN_CTX(ctx), 0, sizeof (*SKEIN_CTX(ctx)));
kmem_free(SKEIN_CTX(ctx), sizeof (*SKEIN_CTX(ctx)));
SKEIN_CTX_LVALUE(ctx) = NULL;
digest->cd_length = 0;
@@ -452,7 +452,7 @@ skein_final(crypto_ctx_t *ctx, crypto_data_t *digest)
else
digest->cd_length = 0;
- bzero(SKEIN_CTX(ctx), sizeof (*SKEIN_CTX(ctx)));
+ memset(SKEIN_CTX(ctx), 0, sizeof (*SKEIN_CTX(ctx)));
kmem_free(SKEIN_CTX(ctx), sizeof (*(SKEIN_CTX(ctx))));
SKEIN_CTX_LVALUE(ctx) = NULL;
@@ -494,7 +494,7 @@ out:
CRYPTO_BITS2BYTES(skein_ctx.sc_digest_bitlen);
else
digest->cd_length = 0;
- bzero(&skein_ctx, sizeof (skein_ctx));
+ memset(&skein_ctx, 0, sizeof (skein_ctx));
return (error);
}
@@ -543,7 +543,7 @@ skein_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
return (CRYPTO_HOST_MEMORY);
if (ctx_template != NULL) {
- bcopy(ctx_template, SKEIN_CTX(ctx),
+ memcpy(SKEIN_CTX(ctx), ctx_template,
sizeof (*SKEIN_CTX(ctx)));
} else {
error = skein_mac_ctx_build(SKEIN_CTX(ctx), mechanism, key);
@@ -553,7 +553,7 @@ skein_mac_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
return (CRYPTO_SUCCESS);
errout:
- bzero(SKEIN_CTX(ctx), sizeof (*SKEIN_CTX(ctx)));
+ memset(SKEIN_CTX(ctx), 0, sizeof (*SKEIN_CTX(ctx)));
kmem_free(SKEIN_CTX(ctx), sizeof (*SKEIN_CTX(ctx)));
return (error);
}
@@ -573,13 +573,13 @@ skein_mac_atomic(crypto_mechanism_t *mechanism,
crypto_spi_ctx_template_t ctx_template)
{
/* faux crypto context just for skein_digest_{update,final} */
- int error;
+ int error;
crypto_ctx_t ctx;
skein_ctx_t skein_ctx;
SKEIN_CTX_LVALUE(&ctx) = &skein_ctx;
if (ctx_template != NULL) {
- bcopy(ctx_template, &skein_ctx, sizeof (skein_ctx));
+ memcpy(&skein_ctx, ctx_template, sizeof (skein_ctx));
} else {
error = skein_mac_ctx_build(&skein_ctx, mechanism, key);
if (error != CRYPTO_SUCCESS)
@@ -593,7 +593,7 @@ skein_mac_atomic(crypto_mechanism_t *mechanism,
return (CRYPTO_SUCCESS);
errout:
- bzero(&skein_ctx, sizeof (skein_ctx));
+ memset(&skein_ctx, 0, sizeof (skein_ctx));
return (error);
}
@@ -624,7 +624,7 @@ skein_create_ctx_template(crypto_mechanism_t *mechanism, crypto_key_t *key,
return (CRYPTO_SUCCESS);
errout:
- bzero(ctx_tmpl, sizeof (*ctx_tmpl));
+ memset(ctx_tmpl, 0, sizeof (*ctx_tmpl));
kmem_free(ctx_tmpl, sizeof (*ctx_tmpl));
return (error);
}
@@ -636,7 +636,7 @@ static int
skein_free_context(crypto_ctx_t *ctx)
{
if (SKEIN_CTX(ctx) != NULL) {
- bzero(SKEIN_CTX(ctx), sizeof (*SKEIN_CTX(ctx)));
+ memset(SKEIN_CTX(ctx), 0, sizeof (*SKEIN_CTX(ctx)));
kmem_free(SKEIN_CTX(ctx), sizeof (*SKEIN_CTX(ctx)));
SKEIN_CTX_LVALUE(ctx) = NULL;
}
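
Across all three files the free sites keep the same shape: scrub the context holding key material, then release it. A kernel-context sketch of that idiom follows (the wrapper name is invented; memset() and kmem_free() are as the SPL headers provide them). In userland, by contrast, a memset() directly before free() can be dropped by dead-store elimination, which is why explicit_bzero() exists there.

	static void
	scrub_and_free(void *p, size_t len)
	{
		if (p == NULL)
			return;
		memset(p, 0, len);	/* scrub before returning the memory */
		kmem_free(p, len);
	}
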