author      наб <[email protected]>	2022-02-25 14:26:54 +0100
committer   Brian Behlendorf <[email protected]>	2022-03-15 15:13:42 -0700
commit      861166b02701dfc8f63a105bd32758e806c84fd7 (patch)
tree        1f9341513470b4615ca340c40ad087101c7dcf24 /module/icp/algs/modes
parent      1d77d62f5a77cab85d4b98ecf72a9838f70d6bf1 (diff)
Remove bcopy(), bzero(), bcmp()
bcopy() has a confusing argument order and is actually a move, not a copy; they're all deprecated since POSIX.1-2001 and removed in -2008, and we shim them out to mem*() on Linux anyway.

Reviewed-by: Brian Behlendorf <[email protected]>
Signed-off-by: Ahelenia Ziemiańska <[email protected]>
Closes #12996
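For readers unfamiliar with the legacy interfaces, the rewrite below is mechanical: bcopy(src, dst, n) puts the source first while memcpy(dst, src, n) puts the destination first, bzero(p, n) becomes memset(p, 0, n), and bcmp() becomes memcmp(). A minimal standalone C sketch of the mapping (illustration only, not part of the patch; the buffers here are hypothetical):

#include <stdio.h>
#include <string.h>

int
main(void)
{
	char src[16] = "hello, icp";
	char dst[16];

	/* bcopy(src, dst, sizeof (dst)): source first, length last. */
	memcpy(dst, src, sizeof (dst));		/* destination comes first */

	/* bzero(dst, sizeof (dst)) becomes an explicit zero fill. */
	memset(dst, 0, sizeof (dst));

	/* bcmp() only tests equality; memcmp() also returns an ordering. */
	if (memcmp(src, dst, sizeof (dst)) != 0)
		printf("buffers differ after the memset\n");

	/*
	 * bcopy() is defined for overlapping regions, so code relying on
	 * that maps to memmove(), not memcpy().
	 */
	memmove(src + 1, src, sizeof (src) - 1);

	return (0);
}

The overlap caveat is why the commit message calls bcopy() a move rather than a copy: memcpy() has undefined behavior on overlapping buffers, while memmove() (and historically bcopy()) does not.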
Diffstat (limited to 'module/icp/algs/modes')
-rw-r--r--	module/icp/algs/modes/cbc.c	30
-rw-r--r--	module/icp/algs/modes/ccm.c	102
-rw-r--r--	module/icp/algs/modes/ctr.c	21
-rw-r--r--	module/icp/algs/modes/ecb.c	14
-rw-r--r--	module/icp/algs/modes/gcm.c	86
-rw-r--r--	module/icp/algs/modes/modes.c	2
6 files changed, 127 insertions, 128 deletions
diff --git a/module/icp/algs/modes/cbc.c b/module/icp/algs/modes/cbc.c
index 73605f04d..da3ff4e35 100644
--- a/module/icp/algs/modes/cbc.c
+++ b/module/icp/algs/modes/cbc.c
@@ -51,8 +51,8 @@ cbc_encrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
if (length + ctx->cbc_remainder_len < block_size) {
/* accumulate bytes here and return */
- bcopy(datap,
- (uint8_t *)ctx->cbc_remainder + ctx->cbc_remainder_len,
+ memcpy((uint8_t *)ctx->cbc_remainder + ctx->cbc_remainder_len,
+ datap,
length);
ctx->cbc_remainder_len += length;
ctx->cbc_copy_to = datap;
@@ -70,8 +70,8 @@ cbc_encrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
if (need > remainder)
return (CRYPTO_DATA_LEN_RANGE);
- bcopy(datap, &((uint8_t *)ctx->cbc_remainder)
- [ctx->cbc_remainder_len], need);
+ memcpy(&((uint8_t *)ctx->cbc_remainder)
+ [ctx->cbc_remainder_len], datap, need);
blockp = (uint8_t *)ctx->cbc_remainder;
} else {
@@ -91,10 +91,10 @@ cbc_encrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
if (out_data_1_len == block_size) {
copy_block(lastp, out_data_1);
} else {
- bcopy(lastp, out_data_1, out_data_1_len);
+ memcpy(out_data_1, lastp, out_data_1_len);
if (out_data_2 != NULL) {
- bcopy(lastp + out_data_1_len,
- out_data_2,
+ memcpy(out_data_2,
+ lastp + out_data_1_len,
block_size - out_data_1_len);
}
}
@@ -113,7 +113,7 @@ cbc_encrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
/* Incomplete last block. */
if (remainder > 0 && remainder < block_size) {
- bcopy(datap, ctx->cbc_remainder, remainder);
+ memcpy(ctx->cbc_remainder, datap, remainder);
ctx->cbc_remainder_len = remainder;
ctx->cbc_copy_to = datap;
goto out;
@@ -157,8 +157,8 @@ cbc_decrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
if (length + ctx->cbc_remainder_len < block_size) {
/* accumulate bytes here and return */
- bcopy(datap,
- (uint8_t *)ctx->cbc_remainder + ctx->cbc_remainder_len,
+ memcpy((uint8_t *)ctx->cbc_remainder + ctx->cbc_remainder_len,
+ datap,
length);
ctx->cbc_remainder_len += length;
ctx->cbc_copy_to = datap;
@@ -176,8 +176,8 @@ cbc_decrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
if (need > remainder)
return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
- bcopy(datap, &((uint8_t *)ctx->cbc_remainder)
- [ctx->cbc_remainder_len], need);
+ memcpy(&((uint8_t *)ctx->cbc_remainder)
+ [ctx->cbc_remainder_len], datap, need);
blockp = (uint8_t *)ctx->cbc_remainder;
} else {
@@ -203,9 +203,9 @@ cbc_decrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
&out_data_1_len, &out_data_2, block_size);
- bcopy(blockp, out_data_1, out_data_1_len);
+ memcpy(out_data_1, blockp, out_data_1_len);
if (out_data_2 != NULL) {
- bcopy(blockp + out_data_1_len, out_data_2,
+ memcpy(out_data_2, blockp + out_data_1_len,
block_size - out_data_1_len);
}
@@ -224,7 +224,7 @@ cbc_decrypt_contiguous_blocks(cbc_ctx_t *ctx, char *data, size_t length,
/* Incomplete last block. */
if (remainder > 0 && remainder < block_size) {
- bcopy(datap, ctx->cbc_remainder, remainder);
+ memcpy(ctx->cbc_remainder, datap, remainder);
ctx->cbc_remainder_len = remainder;
ctx->cbc_lastp = lastp;
ctx->cbc_copy_to = datap;
diff --git a/module/icp/algs/modes/ccm.c b/module/icp/algs/modes/ccm.c
index a41cbc395..9fde2684a 100644
--- a/module/icp/algs/modes/ccm.c
+++ b/module/icp/algs/modes/ccm.c
@@ -59,8 +59,8 @@ ccm_mode_encrypt_contiguous_blocks(ccm_ctx_t *ctx, char *data, size_t length,
if (length + ctx->ccm_remainder_len < block_size) {
/* accumulate bytes here and return */
- bcopy(datap,
- (uint8_t *)ctx->ccm_remainder + ctx->ccm_remainder_len,
+ memcpy((uint8_t *)ctx->ccm_remainder + ctx->ccm_remainder_len,
+ datap,
length);
ctx->ccm_remainder_len += length;
ctx->ccm_copy_to = datap;
@@ -80,8 +80,8 @@ ccm_mode_encrypt_contiguous_blocks(ccm_ctx_t *ctx, char *data, size_t length,
if (need > remainder)
return (CRYPTO_DATA_LEN_RANGE);
- bcopy(datap, &((uint8_t *)ctx->ccm_remainder)
- [ctx->ccm_remainder_len], need);
+ memcpy(&((uint8_t *)ctx->ccm_remainder)
+ [ctx->ccm_remainder_len], datap, need);
blockp = (uint8_t *)ctx->ccm_remainder;
} else {
@@ -132,10 +132,10 @@ ccm_mode_encrypt_contiguous_blocks(ccm_ctx_t *ctx, char *data, size_t length,
if (out_data_1_len == block_size) {
copy_block(lastp, out_data_1);
} else {
- bcopy(lastp, out_data_1, out_data_1_len);
+ memcpy(out_data_1, lastp, out_data_1_len);
if (out_data_2 != NULL) {
- bcopy(lastp + out_data_1_len,
- out_data_2,
+ memcpy(out_data_2,
+ lastp + out_data_1_len,
block_size - out_data_1_len);
}
}
@@ -154,7 +154,7 @@ ccm_mode_encrypt_contiguous_blocks(ccm_ctx_t *ctx, char *data, size_t length,
/* Incomplete last block. */
if (remainder > 0 && remainder < block_size) {
- bcopy(datap, ctx->ccm_remainder, remainder);
+ memcpy(ctx->ccm_remainder, datap, remainder);
ctx->ccm_remainder_len = remainder;
ctx->ccm_copy_to = datap;
goto out;
@@ -224,10 +224,10 @@ ccm_encrypt_final(ccm_ctx_t *ctx, crypto_data_t *out, size_t block_size,
/* ccm_mac_input_buf is not used for encryption */
macp = (uint8_t *)ctx->ccm_mac_input_buf;
- bzero(macp, block_size);
+ memset(macp, 0, block_size);
/* copy remainder to temporary buffer */
- bcopy(ctx->ccm_remainder, macp, ctx->ccm_remainder_len);
+ memcpy(macp, ctx->ccm_remainder, ctx->ccm_remainder_len);
/* calculate the CBC MAC */
xor_block(macp, mac_buf);
@@ -254,33 +254,32 @@ ccm_encrypt_final(ccm_ctx_t *ctx, crypto_data_t *out, size_t block_size,
ctx->ccm_remainder_len + ctx->ccm_mac_len);
if (ctx->ccm_remainder_len > 0) {
-
/* copy temporary block to where it belongs */
if (out_data_2 == NULL) {
/* everything will fit in out_data_1 */
- bcopy(macp, out_data_1, ctx->ccm_remainder_len);
- bcopy(ccm_mac_p, out_data_1 + ctx->ccm_remainder_len,
+ memcpy(out_data_1, macp, ctx->ccm_remainder_len);
+ memcpy(out_data_1 + ctx->ccm_remainder_len, ccm_mac_p,
ctx->ccm_mac_len);
} else {
-
if (out_data_1_len < ctx->ccm_remainder_len) {
-
size_t data_2_len_used;
- bcopy(macp, out_data_1, out_data_1_len);
+ memcpy(out_data_1, macp, out_data_1_len);
data_2_len_used = ctx->ccm_remainder_len
- out_data_1_len;
- bcopy((uint8_t *)macp + out_data_1_len,
- out_data_2, data_2_len_used);
- bcopy(ccm_mac_p, out_data_2 + data_2_len_used,
+ memcpy(out_data_2,
+ (uint8_t *)macp + out_data_1_len,
+ data_2_len_used);
+ memcpy(out_data_2 + data_2_len_used,
+ ccm_mac_p,
ctx->ccm_mac_len);
} else {
- bcopy(macp, out_data_1, out_data_1_len);
+ memcpy(out_data_1, macp, out_data_1_len);
if (out_data_1_len == ctx->ccm_remainder_len) {
/* mac will be in out_data_2 */
- bcopy(ccm_mac_p, out_data_2,
+ memcpy(out_data_2, ccm_mac_p,
ctx->ccm_mac_len);
} else {
size_t len_not_used = out_data_1_len -
@@ -290,11 +289,11 @@ ccm_encrypt_final(ccm_ctx_t *ctx, crypto_data_t *out, size_t block_size,
* out_data_1, part of the mac will be
* in out_data_2
*/
- bcopy(ccm_mac_p,
- out_data_1 + ctx->ccm_remainder_len,
- len_not_used);
- bcopy(ccm_mac_p + len_not_used,
- out_data_2,
+ memcpy(out_data_1 +
+ ctx->ccm_remainder_len,
+ ccm_mac_p, len_not_used);
+ memcpy(out_data_2,
+ ccm_mac_p + len_not_used,
ctx->ccm_mac_len - len_not_used);
}
@@ -302,9 +301,9 @@ ccm_encrypt_final(ccm_ctx_t *ctx, crypto_data_t *out, size_t block_size,
}
} else {
/* copy block to where it belongs */
- bcopy(ccm_mac_p, out_data_1, out_data_1_len);
+ memcpy(out_data_1, ccm_mac_p, out_data_1_len);
if (out_data_2 != NULL) {
- bcopy(ccm_mac_p + out_data_1_len, out_data_2,
+ memcpy(out_data_2, ccm_mac_p + out_data_1_len,
block_size - out_data_1_len);
}
}
@@ -372,7 +371,7 @@ ccm_mode_decrypt_contiguous_blocks(ccm_ctx_t *ctx, char *data, size_t length,
}
tmp = (uint8_t *)ctx->ccm_mac_input_buf;
- bcopy(datap, tmp + pm_len, length);
+ memcpy(tmp + pm_len, datap, length);
ctx->ccm_processed_mac_len += length;
return (CRYPTO_SUCCESS);
@@ -405,15 +404,15 @@ ccm_mode_decrypt_contiguous_blocks(ccm_ctx_t *ctx, char *data, size_t length,
mac_len = length - pt_part;
ctx->ccm_processed_mac_len = mac_len;
- bcopy(data + pt_part, ctx->ccm_mac_input_buf, mac_len);
+ memcpy(ctx->ccm_mac_input_buf, data + pt_part, mac_len);
if (pt_part + ctx->ccm_remainder_len < block_size) {
/*
* since this is last of the ciphertext, will
* just decrypt with it here
*/
- bcopy(datap, &((uint8_t *)ctx->ccm_remainder)
- [ctx->ccm_remainder_len], pt_part);
+ memcpy(&((uint8_t *)ctx->ccm_remainder)
+ [ctx->ccm_remainder_len], datap, pt_part);
ctx->ccm_remainder_len += pt_part;
ccm_decrypt_incomplete_block(ctx, encrypt_block);
ctx->ccm_processed_data_len += ctx->ccm_remainder_len;
@@ -424,9 +423,9 @@ ccm_mode_decrypt_contiguous_blocks(ccm_ctx_t *ctx, char *data, size_t length,
length = pt_part;
}
} else if (length + ctx->ccm_remainder_len < block_size) {
- /* accumulate bytes here and return */
- bcopy(datap,
- (uint8_t *)ctx->ccm_remainder + ctx->ccm_remainder_len,
+ /* accumulate bytes here and return */
+ memcpy((uint8_t *)ctx->ccm_remainder + ctx->ccm_remainder_len,
+ datap,
length);
ctx->ccm_remainder_len += length;
ctx->ccm_copy_to = datap;
@@ -441,8 +440,8 @@ ccm_mode_decrypt_contiguous_blocks(ccm_ctx_t *ctx, char *data, size_t length,
if (need > remainder)
return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
- bcopy(datap, &((uint8_t *)ctx->ccm_remainder)
- [ctx->ccm_remainder_len], need);
+ memcpy(&((uint8_t *)ctx->ccm_remainder)
+ [ctx->ccm_remainder_len], datap, need);
blockp = (uint8_t *)ctx->ccm_remainder;
} else {
@@ -492,7 +491,7 @@ ccm_mode_decrypt_contiguous_blocks(ccm_ctx_t *ctx, char *data, size_t length,
/* Incomplete last block */
if (remainder > 0 && remainder < block_size) {
- bcopy(datap, ctx->ccm_remainder, remainder);
+ memcpy(ctx->ccm_remainder, datap, remainder);
ctx->ccm_remainder_len = remainder;
ctx->ccm_copy_to = datap;
if (ctx->ccm_processed_mac_len > 0) {
@@ -539,10 +538,9 @@ ccm_decrypt_final(ccm_ctx_t *ctx, crypto_data_t *out, size_t block_size,
macp = (uint8_t *)ctx->ccm_tmp;
while (mac_remain > 0) {
-
if (mac_remain < block_size) {
- bzero(macp, block_size);
- bcopy(pt, macp, mac_remain);
+ memset(macp, 0, block_size);
+ memcpy(macp, pt, mac_remain);
mac_remain = 0;
} else {
copy_block(pt, macp);
@@ -560,7 +558,7 @@ ccm_decrypt_final(ccm_ctx_t *ctx, crypto_data_t *out, size_t block_size,
calculate_ccm_mac((ccm_ctx_t *)ctx, ccm_mac_p, encrypt_block);
/* compare the input CCM MAC value with what we calculated */
- if (bcmp(ctx->ccm_mac_input_buf, ccm_mac_p, ctx->ccm_mac_len)) {
+ if (memcmp(ctx->ccm_mac_input_buf, ccm_mac_p, ctx->ccm_mac_len)) {
/* They don't match */
return (CRYPTO_INVALID_MAC);
} else {
@@ -654,10 +652,10 @@ ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
b0[0] = (have_adata << 6) | (((t - 2) / 2) << 3) | (q - 1);
/* copy the nonce value into b0 */
- bcopy(nonce, &(b0[1]), nonceSize);
+ memcpy(&(b0[1]), nonce, nonceSize);
/* store the length of the payload into b0 */
- bzero(&(b0[1+nonceSize]), q);
+ memset(&(b0[1+nonceSize]), 0, q);
payloadSize = aes_ctx->ccm_data_len;
limit = 8 < q ? 8 : q;
@@ -673,9 +671,9 @@ ccm_format_initial_blocks(uchar_t *nonce, ulong_t nonceSize,
cb[0] = 0x07 & (q-1); /* first byte */
/* copy the nonce value into the counter block */
- bcopy(nonce, &(cb[1]), nonceSize);
+ memcpy(&(cb[1]), nonce, nonceSize);
- bzero(&(cb[1+nonceSize]), q);
+ memset(&(cb[1+nonceSize]), 0, q);
/* Create the mask for the counter field based on the size of nonce */
q <<= 3;
@@ -782,7 +780,7 @@ ccm_init(ccm_ctx_t *ctx, unsigned char *nonce, size_t nonce_len,
/* The IV for CBC MAC for AES CCM mode is always zero */
ivp = (uint8_t *)ctx->ccm_tmp;
- bzero(ivp, block_size);
+ memset(ivp, 0, block_size);
xor_block(ivp, mac_buf);
@@ -800,14 +798,14 @@ ccm_init(ccm_ctx_t *ctx, unsigned char *nonce, size_t nonce_len,
/* 1st block: it contains encoded associated data, and some data */
authp = (uint8_t *)ctx->ccm_tmp;
- bzero(authp, block_size);
- bcopy(encoded_a, authp, encoded_a_len);
+ memset(authp, 0, block_size);
+ memcpy(authp, encoded_a, encoded_a_len);
processed = block_size - encoded_a_len;
if (processed > auth_data_len) {
/* in case auth_data is very small */
processed = auth_data_len;
}
- bcopy(auth_data, authp+encoded_a_len, processed);
+ memcpy(authp+encoded_a_len, auth_data, processed);
/* xor with previous buffer */
xor_block(authp, mac_buf);
encrypt_block(ctx->ccm_keysched, mac_buf, mac_buf);
@@ -823,8 +821,8 @@ ccm_init(ccm_ctx_t *ctx, unsigned char *nonce, size_t nonce_len,
* There's not a block full of data, pad rest of
* buffer with zero
*/
- bzero(authp, block_size);
- bcopy(&(auth_data[processed]), authp, remainder);
+ memset(authp, 0, block_size);
+ memcpy(authp, &(auth_data[processed]), remainder);
datap = (uint8_t *)authp;
remainder = 0;
} else {
diff --git a/module/icp/algs/modes/ctr.c b/module/icp/algs/modes/ctr.c
index 82295cda8..c31c62516 100644
--- a/module/icp/algs/modes/ctr.c
+++ b/module/icp/algs/modes/ctr.c
@@ -52,8 +52,8 @@ ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
if (length + ctx->ctr_remainder_len < block_size) {
/* accumulate bytes here and return */
- bcopy(datap,
- (uint8_t *)ctx->ctr_remainder + ctx->ctr_remainder_len,
+ memcpy((uint8_t *)ctx->ctr_remainder + ctx->ctr_remainder_len,
+ datap,
length);
ctx->ctr_remainder_len += length;
ctx->ctr_copy_to = datap;
@@ -71,8 +71,8 @@ ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
if (need > remainder)
return (CRYPTO_DATA_LEN_RANGE);
- bcopy(datap, &((uint8_t *)ctx->ctr_remainder)
- [ctx->ctr_remainder_len], need);
+ memcpy(&((uint8_t *)ctx->ctr_remainder)
+ [ctx->ctr_remainder_len], datap, need);
blockp = (uint8_t *)ctx->ctr_remainder;
} else {
@@ -114,9 +114,9 @@ ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
&out_data_1_len, &out_data_2, block_size);
/* copy block to where it belongs */
- bcopy(lastp, out_data_1, out_data_1_len);
+ memcpy(out_data_1, lastp, out_data_1_len);
if (out_data_2 != NULL) {
- bcopy(lastp + out_data_1_len, out_data_2,
+ memcpy(out_data_2, lastp + out_data_1_len,
block_size - out_data_1_len);
}
/* update offset */
@@ -134,7 +134,7 @@ ctr_mode_contiguous_blocks(ctr_ctx_t *ctx, char *data, size_t length,
/* Incomplete last block. */
if (remainder > 0 && remainder < block_size) {
- bcopy(datap, ctx->ctr_remainder, remainder);
+ memcpy(ctx->ctr_remainder, datap, remainder);
ctx->ctr_remainder_len = remainder;
ctx->ctr_copy_to = datap;
goto out;
@@ -176,10 +176,11 @@ ctr_mode_final(ctr_ctx_t *ctx, crypto_data_t *out,
crypto_get_ptrs(out, &iov_or_mp, &offset, &out_data_1,
&out_data_1_len, &out_data_2, ctx->ctr_remainder_len);
- bcopy(p, out_data_1, out_data_1_len);
+ memcpy(out_data_1, p, out_data_1_len);
if (out_data_2 != NULL) {
- bcopy((uint8_t *)p + out_data_1_len,
- out_data_2, ctx->ctr_remainder_len - out_data_1_len);
+ memcpy(out_data_2,
+ (uint8_t *)p + out_data_1_len,
+ ctx->ctr_remainder_len - out_data_1_len);
}
out->cd_offset += ctx->ctr_remainder_len;
ctx->ctr_remainder_len = 0;
diff --git a/module/icp/algs/modes/ecb.c b/module/icp/algs/modes/ecb.c
index ffbdb9d57..e0b8ab15c 100644
--- a/module/icp/algs/modes/ecb.c
+++ b/module/icp/algs/modes/ecb.c
@@ -49,8 +49,8 @@ ecb_cipher_contiguous_blocks(ecb_ctx_t *ctx, char *data, size_t length,
if (length + ctx->ecb_remainder_len < block_size) {
/* accumulate bytes here and return */
- bcopy(datap,
- (uint8_t *)ctx->ecb_remainder + ctx->ecb_remainder_len,
+ memcpy((uint8_t *)ctx->ecb_remainder + ctx->ecb_remainder_len,
+ datap,
length);
ctx->ecb_remainder_len += length;
ctx->ecb_copy_to = datap;
@@ -68,8 +68,8 @@ ecb_cipher_contiguous_blocks(ecb_ctx_t *ctx, char *data, size_t length,
if (need > remainder)
return (CRYPTO_DATA_LEN_RANGE);
- bcopy(datap, &((uint8_t *)ctx->ecb_remainder)
- [ctx->ecb_remainder_len], need);
+ memcpy(&((uint8_t *)ctx->ecb_remainder)
+ [ctx->ecb_remainder_len], datap, need);
blockp = (uint8_t *)ctx->ecb_remainder;
} else {
@@ -81,9 +81,9 @@ ecb_cipher_contiguous_blocks(ecb_ctx_t *ctx, char *data, size_t length,
&out_data_1_len, &out_data_2, block_size);
/* copy block to where it belongs */
- bcopy(lastp, out_data_1, out_data_1_len);
+ memcpy(out_data_1, lastp, out_data_1_len);
if (out_data_2 != NULL) {
- bcopy(lastp + out_data_1_len, out_data_2,
+ memcpy(out_data_2, lastp + out_data_1_len,
block_size - out_data_1_len);
}
/* update offset */
@@ -101,7 +101,7 @@ ecb_cipher_contiguous_blocks(ecb_ctx_t *ctx, char *data, size_t length,
/* Incomplete last block. */
if (remainder > 0 && remainder < block_size) {
- bcopy(datap, ctx->ecb_remainder, remainder);
+ memcpy(ctx->ecb_remainder, datap, remainder);
ctx->ecb_remainder_len = remainder;
ctx->ecb_copy_to = datap;
goto out;
diff --git a/module/icp/algs/modes/gcm.c b/module/icp/algs/modes/gcm.c
index 7d34c2b04..e666b45b5 100644
--- a/module/icp/algs/modes/gcm.c
+++ b/module/icp/algs/modes/gcm.c
@@ -108,8 +108,8 @@ gcm_mode_encrypt_contiguous_blocks(gcm_ctx_t *ctx, char *data, size_t length,
if (length + ctx->gcm_remainder_len < block_size) {
/* accumulate bytes here and return */
- bcopy(datap,
- (uint8_t *)ctx->gcm_remainder + ctx->gcm_remainder_len,
+ memcpy((uint8_t *)ctx->gcm_remainder + ctx->gcm_remainder_len,
+ datap,
length);
ctx->gcm_remainder_len += length;
if (ctx->gcm_copy_to == NULL) {
@@ -130,8 +130,8 @@ gcm_mode_encrypt_contiguous_blocks(gcm_ctx_t *ctx, char *data, size_t length,
if (need > remainder)
return (CRYPTO_DATA_LEN_RANGE);
- bcopy(datap, &((uint8_t *)ctx->gcm_remainder)
- [ctx->gcm_remainder_len], need);
+ memcpy(&((uint8_t *)ctx->gcm_remainder)
+ [ctx->gcm_remainder_len], datap, need);
blockp = (uint8_t *)ctx->gcm_remainder;
} else {
@@ -162,10 +162,10 @@ gcm_mode_encrypt_contiguous_blocks(gcm_ctx_t *ctx, char *data, size_t length,
if (out_data_1_len == block_size) {
copy_block(lastp, out_data_1);
} else {
- bcopy(lastp, out_data_1, out_data_1_len);
+ memcpy(out_data_1, lastp, out_data_1_len);
if (out_data_2 != NULL) {
- bcopy(lastp + out_data_1_len,
- out_data_2,
+ memcpy(out_data_2,
+ lastp + out_data_1_len,
block_size - out_data_1_len);
}
}
@@ -187,7 +187,7 @@ gcm_mode_encrypt_contiguous_blocks(gcm_ctx_t *ctx, char *data, size_t length,
/* Incomplete last block. */
if (remainder > 0 && remainder < block_size) {
- bcopy(datap, ctx->gcm_remainder, remainder);
+ memcpy(ctx->gcm_remainder, datap, remainder);
ctx->gcm_remainder_len = remainder;
ctx->gcm_copy_to = datap;
goto out;
@@ -245,7 +245,7 @@ gcm_encrypt_final(gcm_ctx_t *ctx, crypto_data_t *out, size_t block_size,
(uint8_t *)ctx->gcm_tmp);
macp = (uint8_t *)ctx->gcm_remainder;
- bzero(macp + ctx->gcm_remainder_len,
+ memset(macp + ctx->gcm_remainder_len, 0,
block_size - ctx->gcm_remainder_len);
/* XOR with counter block */
@@ -309,8 +309,8 @@ gcm_decrypt_incomplete_block(gcm_ctx_t *ctx, size_t block_size, size_t index,
counterp = (uint8_t *)ctx->gcm_tmp;
/* authentication tag */
- bzero((uint8_t *)ctx->gcm_tmp, block_size);
- bcopy(datap, (uint8_t *)ctx->gcm_tmp, ctx->gcm_remainder_len);
+ memset((uint8_t *)ctx->gcm_tmp, 0, block_size);
+ memcpy((uint8_t *)ctx->gcm_tmp, datap, ctx->gcm_remainder_len);
/* add ciphertext to the hash */
GHASH(ctx, ctx->gcm_tmp, ctx->gcm_ghash, gcm_impl_get_ops());
@@ -350,7 +350,7 @@ gcm_mode_decrypt_contiguous_blocks(gcm_ctx_t *ctx, char *data, size_t length,
}
if (ctx->gcm_pt_buf != NULL) {
- bcopy(ctx->gcm_pt_buf, new, ctx->gcm_pt_buf_len);
+ memcpy(new, ctx->gcm_pt_buf, ctx->gcm_pt_buf_len);
vmem_free(ctx->gcm_pt_buf, ctx->gcm_pt_buf_len);
} else {
ASSERT0(ctx->gcm_pt_buf_len);
@@ -358,7 +358,7 @@ gcm_mode_decrypt_contiguous_blocks(gcm_ctx_t *ctx, char *data, size_t length,
ctx->gcm_pt_buf = new;
ctx->gcm_pt_buf_len = new_len;
- bcopy(data, &ctx->gcm_pt_buf[ctx->gcm_processed_data_len],
+ memcpy(&ctx->gcm_pt_buf[ctx->gcm_processed_data_len], data,
length);
ctx->gcm_processed_data_len += length;
}
@@ -397,7 +397,7 @@ gcm_decrypt_final(gcm_ctx_t *ctx, crypto_data_t *out, size_t block_size,
while (remainder > 0) {
/* Incomplete last block */
if (remainder < block_size) {
- bcopy(blockp, ctx->gcm_remainder, remainder);
+ memcpy(ctx->gcm_remainder, blockp, remainder);
ctx->gcm_remainder_len = remainder;
/*
* not expecting anymore ciphertext, just
@@ -438,7 +438,7 @@ out:
xor_block((uint8_t *)ctx->gcm_J0, ghash);
/* compare the input authentication tag with what we calculated */
- if (bcmp(&ctx->gcm_pt_buf[pt_len], ghash, ctx->gcm_tag_len)) {
+ if (memcmp(&ctx->gcm_pt_buf[pt_len], ghash, ctx->gcm_tag_len)) {
/* They don't match */
return (CRYPTO_INVALID_MAC);
} else {
@@ -495,7 +495,7 @@ gcm_format_initial_blocks(uchar_t *iv, ulong_t iv_len,
ghash = (uint8_t *)ctx->gcm_ghash;
cb = (uint8_t *)ctx->gcm_cb;
if (iv_len == 12) {
- bcopy(iv, cb, 12);
+ memcpy(cb, iv, 12);
cb[12] = 0;
cb[13] = 0;
cb[14] = 0;
@@ -506,8 +506,8 @@ gcm_format_initial_blocks(uchar_t *iv, ulong_t iv_len,
/* GHASH the IV */
do {
if (remainder < block_size) {
- bzero(cb, block_size);
- bcopy(&(iv[processed]), cb, remainder);
+ memset(cb, 0, block_size);
+ memcpy(cb, &(iv[processed]), remainder);
datap = (uint8_t *)cb;
remainder = 0;
} else {
@@ -539,7 +539,7 @@ gcm_init(gcm_ctx_t *ctx, unsigned char *iv, size_t iv_len,
size_t remainder, processed;
/* encrypt zero block to get subkey H */
- bzero(ctx->gcm_H, sizeof (ctx->gcm_H));
+ memset(ctx->gcm_H, 0, sizeof (ctx->gcm_H));
encrypt_block(ctx->gcm_keysched, (uint8_t *)ctx->gcm_H,
(uint8_t *)ctx->gcm_H);
@@ -549,8 +549,8 @@ gcm_init(gcm_ctx_t *ctx, unsigned char *iv, size_t iv_len,
gops = gcm_impl_get_ops();
authp = (uint8_t *)ctx->gcm_tmp;
ghash = (uint8_t *)ctx->gcm_ghash;
- bzero(authp, block_size);
- bzero(ghash, block_size);
+ memset(authp, 0, block_size);
+ memset(ghash, 0, block_size);
processed = 0;
remainder = auth_data_len;
@@ -562,9 +562,9 @@ gcm_init(gcm_ctx_t *ctx, unsigned char *iv, size_t iv_len,
*/
if (auth_data != NULL) {
- bzero(authp, block_size);
- bcopy(&(auth_data[processed]),
- authp, remainder);
+ memset(authp, 0, block_size);
+ memcpy(authp, &(auth_data[processed]),
+ remainder);
} else {
ASSERT0(remainder);
}
@@ -1139,10 +1139,10 @@ gcm_simd_get_htab_size(boolean_t simd_mode)
static inline void
gcm_clear_ctx(gcm_ctx_t *ctx)
{
- bzero(ctx->gcm_remainder, sizeof (ctx->gcm_remainder));
- bzero(ctx->gcm_H, sizeof (ctx->gcm_H));
- bzero(ctx->gcm_J0, sizeof (ctx->gcm_J0));
- bzero(ctx->gcm_tmp, sizeof (ctx->gcm_tmp));
+ memset(ctx->gcm_remainder, 0, sizeof (ctx->gcm_remainder));
+ memset(ctx->gcm_H, 0, sizeof (ctx->gcm_H));
+ memset(ctx->gcm_J0, 0, sizeof (ctx->gcm_J0));
+ memset(ctx->gcm_tmp, 0, sizeof (ctx->gcm_tmp));
}
/* Increment the GCM counter block by n. */
@@ -1187,8 +1187,8 @@ gcm_mode_encrypt_contiguous_blocks_avx(gcm_ctx_t *ctx, char *data,
need = block_size - ctx->gcm_remainder_len;
if (length < need) {
/* Accumulate bytes here and return. */
- bcopy(datap, (uint8_t *)ctx->gcm_remainder +
- ctx->gcm_remainder_len, length);
+ memcpy((uint8_t *)ctx->gcm_remainder +
+ ctx->gcm_remainder_len, datap, length);
ctx->gcm_remainder_len += length;
if (ctx->gcm_copy_to == NULL) {
@@ -1197,8 +1197,8 @@ gcm_mode_encrypt_contiguous_blocks_avx(gcm_ctx_t *ctx, char *data,
return (CRYPTO_SUCCESS);
} else {
/* Complete incomplete block. */
- bcopy(datap, (uint8_t *)ctx->gcm_remainder +
- ctx->gcm_remainder_len, need);
+ memcpy((uint8_t *)ctx->gcm_remainder +
+ ctx->gcm_remainder_len, datap, need);
ctx->gcm_copy_to = NULL;
}
@@ -1276,7 +1276,7 @@ gcm_mode_encrypt_contiguous_blocks_avx(gcm_ctx_t *ctx, char *data,
/* Less than GCM_AVX_MIN_ENCRYPT_BYTES remain, operate on blocks. */
while (bleft > 0) {
if (bleft < block_size) {
- bcopy(datap, ctx->gcm_remainder, bleft);
+ memcpy(ctx->gcm_remainder, datap, bleft);
ctx->gcm_remainder_len = bleft;
ctx->gcm_copy_to = datap;
goto out;
@@ -1335,7 +1335,7 @@ gcm_encrypt_final_avx(gcm_ctx_t *ctx, crypto_data_t *out, size_t block_size)
const uint32_t *cb = (uint32_t *)ctx->gcm_cb;
aes_encrypt_intel(keysched, aes_rounds, cb, (uint32_t *)tmp);
- bzero(remainder + rem_len, block_size - rem_len);
+ memset(remainder + rem_len, 0, block_size - rem_len);
for (int i = 0; i < rem_len; i++) {
remainder[i] ^= tmp[i];
}
@@ -1431,8 +1431,8 @@ gcm_decrypt_final_avx(gcm_ctx_t *ctx, crypto_data_t *out, size_t block_size)
if (bleft < block_size) {
uint8_t *lastb = (uint8_t *)ctx->gcm_remainder;
- bzero(lastb, block_size);
- bcopy(datap, lastb, bleft);
+ memset(lastb, 0, block_size);
+ memcpy(lastb, datap, bleft);
/* The GCM processing. */
GHASH_AVX(ctx, lastb, block_size);
aes_encrypt_intel(key->encr_ks.ks32, key->nr, cb, tmp);
@@ -1468,7 +1468,7 @@ gcm_decrypt_final_avx(gcm_ctx_t *ctx, crypto_data_t *out, size_t block_size)
kfpu_end();
/* Compare the input authentication tag with what we calculated. */
- if (bcmp(&ctx->gcm_pt_buf[pt_len], ghash, ctx->gcm_tag_len)) {
+ if (memcmp(&ctx->gcm_pt_buf[pt_len], ghash, ctx->gcm_tag_len)) {
/* They don't match. */
return (CRYPTO_INVALID_MAC);
}
@@ -1500,8 +1500,8 @@ gcm_init_avx(gcm_ctx_t *ctx, unsigned char *iv, size_t iv_len,
ASSERT(block_size == GCM_BLOCK_LEN);
/* Init H (encrypt zero block) and create the initial counter block. */
- bzero(ctx->gcm_ghash, sizeof (ctx->gcm_ghash));
- bzero(H, sizeof (ctx->gcm_H));
+ memset(ctx->gcm_ghash, 0, sizeof (ctx->gcm_ghash));
+ memset(H, 0, sizeof (ctx->gcm_H));
kfpu_begin();
aes_encrypt_intel(keysched, aes_rounds,
(const uint32_t *)H, (uint32_t *)H);
@@ -1509,13 +1509,13 @@ gcm_init_avx(gcm_ctx_t *ctx, unsigned char *iv, size_t iv_len,
gcm_init_htab_avx(ctx->gcm_Htable, H);
if (iv_len == 12) {
- bcopy(iv, cb, 12);
+ memcpy(cb, iv, 12);
cb[12] = 0;
cb[13] = 0;
cb[14] = 0;
cb[15] = 1;
/* We need the ICB later. */
- bcopy(cb, ctx->gcm_J0, sizeof (ctx->gcm_J0));
+ memcpy(ctx->gcm_J0, cb, sizeof (ctx->gcm_J0));
} else {
/*
* Most consumers use 12 byte IVs, so it's OK to use the
@@ -1553,8 +1553,8 @@ gcm_init_avx(gcm_ctx_t *ctx, unsigned char *iv, size_t iv_len,
/* Zero pad and hash incomplete last block. */
uint8_t *authp = (uint8_t *)ctx->gcm_tmp;
- bzero(authp, block_size);
- bcopy(datap, authp, incomp);
+ memset(authp, 0, block_size);
+ memcpy(authp, datap, incomp);
GHASH_AVX(ctx, authp, block_size);
}
}
diff --git a/module/icp/algs/modes/modes.c b/module/icp/algs/modes/modes.c
index 59743c7d6..d505de40e 100644
--- a/module/icp/algs/modes/modes.c
+++ b/module/icp/algs/modes/modes.c
@@ -155,7 +155,7 @@ crypto_free_mode_ctx(void *ctx)
#ifdef CAN_USE_GCM_ASM
if (((gcm_ctx_t *)ctx)->gcm_Htable != NULL) {
gcm_ctx_t *gcm_ctx = (gcm_ctx_t *)ctx;
- bzero(gcm_ctx->gcm_Htable, gcm_ctx->gcm_htab_len);
+ memset(gcm_ctx->gcm_Htable, 0, gcm_ctx->gcm_htab_len);
kmem_free(gcm_ctx->gcm_Htable, gcm_ctx->gcm_htab_len);
}
#endif