icp: remove unused incremental cipher methods

Sponsored-by: Klara, Inc.
Sponsored-by: Wasabi Technology, Inc.
Reviewed-by: Brian Behlendorf <behlendorf1@llnl.gov>
Signed-off-by: Rob Norris <rob.norris@klarasystems.com>
Closes #16209
Commit 4ed91dc26e (parent 57249bcddc)
Author: Rob Norris, 2024-05-18 22:17:36 +10:00
Committed by: Brian Behlendorf
3 changed files with 3 additions and 514 deletions
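
For context: "incremental" here refers to the multi-call init/update/final SPI entry points, as opposed to the one-shot atomic entry points that remain in use. A rough, illustrative sketch of the distinction, using simplified stand-in types rather than the real ICP headers (only the encrypt side is shown; decrypt mirrors it):

/* Simplified stand-ins for the ICP's crypto_* types (illustrative only). */
typedef struct crypto_ctx crypto_ctx_t;
typedef struct crypto_mechanism crypto_mechanism_t;
typedef struct crypto_key crypto_key_t;
typedef struct crypto_data crypto_data_t;
typedef void *crypto_spi_ctx_template_t;

typedef struct cipher_ops_sketch {
        /*
         * Incremental path: a context is set up, fed data in pieces, then
         * finalized. These are the entry points this commit removes.
         */
        int (*encrypt_init)(crypto_ctx_t *, crypto_mechanism_t *,
            crypto_key_t *, crypto_spi_ctx_template_t);
        int (*encrypt_update)(crypto_ctx_t *, crypto_data_t *,
            crypto_data_t *);
        int (*encrypt_final)(crypto_ctx_t *, crypto_data_t *);

        /*
         * Atomic path: one call performs the whole operation. This is the
         * only path ZFS uses, and the only one left after this commit.
         */
        int (*encrypt_atomic)(crypto_mechanism_t *, crypto_key_t *,
            crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);
} cipher_ops_sketch_t;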


@@ -222,8 +222,7 @@ kcf_add_mech_provider(short mech_indx,
         if (fg & CRYPTO_FG_DIGEST || fg & CRYPTO_FG_DIGEST_ATOMIC)
                 class = KCF_DIGEST_CLASS;
-        else if (fg & CRYPTO_FG_ENCRYPT || fg & CRYPTO_FG_DECRYPT ||
-            fg & CRYPTO_FG_ENCRYPT_ATOMIC ||
+        else if (fg & CRYPTO_FG_ENCRYPT_ATOMIC ||
             fg & CRYPTO_FG_DECRYPT_ATOMIC)
                 class = KCF_CIPHER_CLASS;
         else if (fg & CRYPTO_FG_MAC || fg & CRYPTO_FG_MAC_ATOMIC)


@@ -89,27 +89,8 @@ typedef struct crypto_digest_ops {
  * with the kernel using crypto_register_provider(9F).
  */
 typedef struct crypto_cipher_ops {
-        int (*encrypt_init)(crypto_ctx_t *,
-            crypto_mechanism_t *, crypto_key_t *,
-            crypto_spi_ctx_template_t);
-        int (*encrypt)(crypto_ctx_t *,
-            crypto_data_t *, crypto_data_t *);
-        int (*encrypt_update)(crypto_ctx_t *,
-            crypto_data_t *, crypto_data_t *);
-        int (*encrypt_final)(crypto_ctx_t *,
-            crypto_data_t *);
         int (*encrypt_atomic)(crypto_mechanism_t *, crypto_key_t *,
             crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);
-        int (*decrypt_init)(crypto_ctx_t *,
-            crypto_mechanism_t *, crypto_key_t *,
-            crypto_spi_ctx_template_t);
-        int (*decrypt)(crypto_ctx_t *,
-            crypto_data_t *, crypto_data_t *);
-        int (*decrypt_update)(crypto_ctx_t *,
-            crypto_data_t *, crypto_data_t *);
-        int (*decrypt_final)(crypto_ctx_t *,
-            crypto_data_t *);
         int (*decrypt_atomic)(crypto_mechanism_t *, crypto_key_t *,
             crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);
 } __no_const crypto_cipher_ops_t;

@@ -172,8 +153,6 @@ typedef struct crypto_ops {
 typedef uint32_t crypto_func_group_t;

-#define CRYPTO_FG_ENCRYPT          0x00000001 /* encrypt_init() */
-#define CRYPTO_FG_DECRYPT          0x00000002 /* decrypt_init() */
 #define CRYPTO_FG_DIGEST           0x00000004 /* digest_init() */
 #define CRYPTO_FG_MAC              0x00001000 /* mac_init() */
 #define CRYPTO_FG_ENCRYPT_ATOMIC   0x00008000 /* encrypt_atomic() */
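
For reference, consumers never touch these SPI function pointers directly; they go through the KCF consumer API, and for ZFS that is always the one-shot form. A hedged sketch of that call shape, assuming the crypto_encrypt() prototype keeps its usual OpenZFS ICP form (the exact signature should be checked against the ICP's sys/crypto/api.h; argument names here are illustrative):

/*
 * Illustrative one-shot consumer call; the KCF maps this onto the
 * provider's encrypt_atomic entry point, so no init/update/final
 * sequence is ever issued.
 */
static int
encrypt_one_shot(crypto_mechanism_t *mech, crypto_key_t *key,
    crypto_data_t *plain, crypto_data_t *cipher)
{
        /* NULL template: no precomputed key schedule is reused. */
        return (crypto_encrypt(mech, plain, key, NULL, cipher));
}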


@@ -42,47 +42,23 @@
 static const crypto_mech_info_t aes_mech_info_tab[] = {
         /* AES_CCM */
         {SUN_CKM_AES_CCM, AES_CCM_MECH_INFO_TYPE,
-            CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
-            CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC},
+            CRYPTO_FG_ENCRYPT_ATOMIC | CRYPTO_FG_DECRYPT_ATOMIC},
         /* AES_GCM */
         {SUN_CKM_AES_GCM, AES_GCM_MECH_INFO_TYPE,
-            CRYPTO_FG_ENCRYPT | CRYPTO_FG_ENCRYPT_ATOMIC |
-            CRYPTO_FG_DECRYPT | CRYPTO_FG_DECRYPT_ATOMIC},
+            CRYPTO_FG_ENCRYPT_ATOMIC | CRYPTO_FG_DECRYPT_ATOMIC},
 };

-static int aes_encrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
-    crypto_key_t *, crypto_spi_ctx_template_t);
-static int aes_decrypt_init(crypto_ctx_t *, crypto_mechanism_t *,
-    crypto_key_t *, crypto_spi_ctx_template_t);
-static int aes_common_init(crypto_ctx_t *, crypto_mechanism_t *,
-    crypto_key_t *, crypto_spi_ctx_template_t, boolean_t);
 static int aes_common_init_ctx(aes_ctx_t *, crypto_spi_ctx_template_t *,
     crypto_mechanism_t *, crypto_key_t *, int, boolean_t);
-static int aes_encrypt_final(crypto_ctx_t *, crypto_data_t *);
-static int aes_decrypt_final(crypto_ctx_t *, crypto_data_t *);
-static int aes_encrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *);
-static int aes_encrypt_update(crypto_ctx_t *, crypto_data_t *,
-    crypto_data_t *);
 static int aes_encrypt_atomic(crypto_mechanism_t *, crypto_key_t *,
     crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);
-static int aes_decrypt(crypto_ctx_t *, crypto_data_t *, crypto_data_t *);
-static int aes_decrypt_update(crypto_ctx_t *, crypto_data_t *,
-    crypto_data_t *);
 static int aes_decrypt_atomic(crypto_mechanism_t *, crypto_key_t *,
     crypto_data_t *, crypto_data_t *, crypto_spi_ctx_template_t);

 static const crypto_cipher_ops_t aes_cipher_ops = {
-        .encrypt_init = aes_encrypt_init,
-        .encrypt = aes_encrypt,
-        .encrypt_update = aes_encrypt_update,
-        .encrypt_final = aes_encrypt_final,
         .encrypt_atomic = aes_encrypt_atomic,
-        .decrypt_init = aes_decrypt_init,
-        .decrypt = aes_decrypt,
-        .decrypt_update = aes_decrypt_update,
-        .decrypt_final = aes_decrypt_final,
         .decrypt_atomic = aes_decrypt_atomic
 };
@@ -190,474 +166,9 @@ init_keysched(crypto_key_t *key, void *newbie)
        return (CRYPTO_SUCCESS);
}
static int
aes_encrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
crypto_key_t *key, crypto_spi_ctx_template_t template)
{
return (aes_common_init(ctx, mechanism, key, template, B_TRUE));
}
static int
aes_decrypt_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
crypto_key_t *key, crypto_spi_ctx_template_t template)
{
return (aes_common_init(ctx, mechanism, key, template, B_FALSE));
}
/*
 * KCF software provider encrypt entry points.
 */
static int
aes_common_init(crypto_ctx_t *ctx, crypto_mechanism_t *mechanism,
crypto_key_t *key, crypto_spi_ctx_template_t template,
boolean_t is_encrypt_init)
{
aes_ctx_t *aes_ctx;
int rv;
if ((rv = aes_check_mech_param(mechanism, &aes_ctx))
!= CRYPTO_SUCCESS)
return (rv);
rv = aes_common_init_ctx(aes_ctx, template, mechanism, key, KM_SLEEP,
is_encrypt_init);
if (rv != CRYPTO_SUCCESS) {
crypto_free_mode_ctx(aes_ctx);
return (rv);
}
ctx->cc_provider_private = aes_ctx;
return (CRYPTO_SUCCESS);
}
static int
aes_encrypt(crypto_ctx_t *ctx, crypto_data_t *plaintext,
crypto_data_t *ciphertext)
{
int ret = CRYPTO_FAILED;
aes_ctx_t *aes_ctx;
size_t saved_length, saved_offset, length_needed;
ASSERT(ctx->cc_provider_private != NULL);
aes_ctx = ctx->cc_provider_private;
ASSERT(ciphertext != NULL);
/*
* We need to just return the length needed to store the output.
* We should not destroy the context for the following case.
*/
switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE)) {
case CCM_MODE:
length_needed = plaintext->cd_length + aes_ctx->ac_mac_len;
break;
case GCM_MODE:
length_needed = plaintext->cd_length + aes_ctx->ac_tag_len;
break;
default:
__builtin_unreachable();
}
if (ciphertext->cd_length < length_needed) {
ciphertext->cd_length = length_needed;
return (CRYPTO_BUFFER_TOO_SMALL);
}
saved_length = ciphertext->cd_length;
saved_offset = ciphertext->cd_offset;
/*
* Do an update on the specified input data.
*/
ret = aes_encrypt_update(ctx, plaintext, ciphertext);
if (ret != CRYPTO_SUCCESS) {
return (ret);
}
/*
* For CCM mode, aes_ccm_encrypt_final() will take care of any
* left-over unprocessed data, and compute the MAC
*/
if (aes_ctx->ac_flags & CCM_MODE) {
/*
* ccm_encrypt_final() will compute the MAC and append
* it to existing ciphertext. So, need to adjust the left over
* length value accordingly
*/
/* order of following 2 lines MUST not be reversed */
ciphertext->cd_offset = ciphertext->cd_length;
ciphertext->cd_length = saved_length - ciphertext->cd_length;
ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, ciphertext,
AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
if (ret != CRYPTO_SUCCESS) {
return (ret);
}
if (plaintext != ciphertext) {
ciphertext->cd_length =
ciphertext->cd_offset - saved_offset;
}
ciphertext->cd_offset = saved_offset;
} else if (aes_ctx->ac_flags & GCM_MODE) {
/*
* gcm_encrypt_final() will compute the MAC and append
* it to existing ciphertext. So, need to adjust the left over
* length value accordingly
*/
/* order of following 2 lines MUST not be reversed */
ciphertext->cd_offset = ciphertext->cd_length;
ciphertext->cd_length = saved_length - ciphertext->cd_length;
ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, ciphertext,
AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
aes_xor_block);
if (ret != CRYPTO_SUCCESS) {
return (ret);
}
if (plaintext != ciphertext) {
ciphertext->cd_length =
ciphertext->cd_offset - saved_offset;
}
ciphertext->cd_offset = saved_offset;
}
ASSERT(aes_ctx->ac_remainder_len == 0);
(void) aes_free_context(ctx);
return (ret);
}
static int
aes_decrypt(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
crypto_data_t *plaintext)
{
int ret = CRYPTO_FAILED;
aes_ctx_t *aes_ctx;
off_t saved_offset;
size_t saved_length, length_needed;
ASSERT(ctx->cc_provider_private != NULL);
aes_ctx = ctx->cc_provider_private;
ASSERT(plaintext != NULL);
/*
* Return length needed to store the output.
* Do not destroy context when plaintext buffer is too small.
*
* CCM: plaintext is MAC len smaller than cipher text
* GCM: plaintext is TAG len smaller than cipher text
*/
switch (aes_ctx->ac_flags & (CCM_MODE|GCM_MODE)) {
case CCM_MODE:
length_needed = aes_ctx->ac_processed_data_len;
break;
case GCM_MODE:
length_needed = ciphertext->cd_length - aes_ctx->ac_tag_len;
break;
default:
__builtin_unreachable();
}
if (plaintext->cd_length < length_needed) {
plaintext->cd_length = length_needed;
return (CRYPTO_BUFFER_TOO_SMALL);
}
saved_offset = plaintext->cd_offset;
saved_length = plaintext->cd_length;
/*
* Do an update on the specified input data.
*/
ret = aes_decrypt_update(ctx, ciphertext, plaintext);
if (ret != CRYPTO_SUCCESS) {
goto cleanup;
}
if (aes_ctx->ac_flags & CCM_MODE) {
ASSERT(aes_ctx->ac_processed_data_len == aes_ctx->ac_data_len);
ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
/* order of following 2 lines MUST not be reversed */
plaintext->cd_offset = plaintext->cd_length;
plaintext->cd_length = saved_length - plaintext->cd_length;
ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, plaintext,
AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
aes_xor_block);
if (ret == CRYPTO_SUCCESS) {
if (plaintext != ciphertext) {
plaintext->cd_length =
plaintext->cd_offset - saved_offset;
}
} else {
plaintext->cd_length = saved_length;
}
plaintext->cd_offset = saved_offset;
} else if (aes_ctx->ac_flags & GCM_MODE) {
/* order of following 2 lines MUST not be reversed */
plaintext->cd_offset = plaintext->cd_length;
plaintext->cd_length = saved_length - plaintext->cd_length;
ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, plaintext,
AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
if (ret == CRYPTO_SUCCESS) {
if (plaintext != ciphertext) {
plaintext->cd_length =
plaintext->cd_offset - saved_offset;
}
} else {
plaintext->cd_length = saved_length;
}
plaintext->cd_offset = saved_offset;
}
ASSERT(aes_ctx->ac_remainder_len == 0);
cleanup:
(void) aes_free_context(ctx);
return (ret);
}
static int
aes_encrypt_update(crypto_ctx_t *ctx, crypto_data_t *plaintext,
crypto_data_t *ciphertext)
{
off_t saved_offset;
size_t saved_length, out_len;
int ret = CRYPTO_SUCCESS;
aes_ctx_t *aes_ctx;
ASSERT(ctx->cc_provider_private != NULL);
aes_ctx = ctx->cc_provider_private;
ASSERT(ciphertext != NULL);
/* compute number of bytes that will hold the ciphertext */
out_len = aes_ctx->ac_remainder_len;
out_len += plaintext->cd_length;
out_len &= ~(AES_BLOCK_LEN - 1);
/* return length needed to store the output */
if (ciphertext->cd_length < out_len) {
ciphertext->cd_length = out_len;
return (CRYPTO_BUFFER_TOO_SMALL);
}
saved_offset = ciphertext->cd_offset;
saved_length = ciphertext->cd_length;
/*
* Do the AES update on the specified input data.
*/
switch (plaintext->cd_format) {
case CRYPTO_DATA_RAW:
ret = crypto_update_iov(ctx->cc_provider_private,
plaintext, ciphertext, aes_encrypt_contiguous_blocks);
break;
case CRYPTO_DATA_UIO:
ret = crypto_update_uio(ctx->cc_provider_private,
plaintext, ciphertext, aes_encrypt_contiguous_blocks);
break;
default:
ret = CRYPTO_ARGUMENTS_BAD;
}
if (ret == CRYPTO_SUCCESS) {
if (plaintext != ciphertext)
ciphertext->cd_length =
ciphertext->cd_offset - saved_offset;
} else {
ciphertext->cd_length = saved_length;
}
ciphertext->cd_offset = saved_offset;
return (ret);
}
static int
aes_decrypt_update(crypto_ctx_t *ctx, crypto_data_t *ciphertext,
crypto_data_t *plaintext)
{
off_t saved_offset;
size_t saved_length;
int ret = CRYPTO_SUCCESS;
ASSERT(ctx->cc_provider_private != NULL);
ASSERT(plaintext != NULL);
saved_offset = plaintext->cd_offset;
saved_length = plaintext->cd_length;
/*
* Do the AES update on the specified input data.
*/
switch (ciphertext->cd_format) {
case CRYPTO_DATA_RAW:
ret = crypto_update_iov(ctx->cc_provider_private,
ciphertext, plaintext, aes_decrypt_contiguous_blocks);
break;
case CRYPTO_DATA_UIO:
ret = crypto_update_uio(ctx->cc_provider_private,
ciphertext, plaintext, aes_decrypt_contiguous_blocks);
break;
default:
ret = CRYPTO_ARGUMENTS_BAD;
}
if (ret == CRYPTO_SUCCESS) {
if (ciphertext != plaintext)
plaintext->cd_length =
plaintext->cd_offset - saved_offset;
} else {
plaintext->cd_length = saved_length;
}
plaintext->cd_offset = saved_offset;
return (ret);
}
static int
aes_encrypt_final(crypto_ctx_t *ctx, crypto_data_t *data)
{
aes_ctx_t *aes_ctx;
int ret;
ASSERT(ctx->cc_provider_private != NULL);
aes_ctx = ctx->cc_provider_private;
if (data->cd_format != CRYPTO_DATA_RAW &&
data->cd_format != CRYPTO_DATA_UIO) {
return (CRYPTO_ARGUMENTS_BAD);
}
if (aes_ctx->ac_flags & CCM_MODE) {
ret = ccm_encrypt_final((ccm_ctx_t *)aes_ctx, data,
AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
if (ret != CRYPTO_SUCCESS) {
return (ret);
}
} else if (aes_ctx->ac_flags & GCM_MODE) {
size_t saved_offset = data->cd_offset;
ret = gcm_encrypt_final((gcm_ctx_t *)aes_ctx, data,
AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
aes_xor_block);
if (ret != CRYPTO_SUCCESS) {
return (ret);
}
data->cd_length = data->cd_offset - saved_offset;
data->cd_offset = saved_offset;
}
(void) aes_free_context(ctx);
return (CRYPTO_SUCCESS);
}
static int
aes_decrypt_final(crypto_ctx_t *ctx, crypto_data_t *data)
{
aes_ctx_t *aes_ctx;
int ret;
off_t saved_offset;
size_t saved_length;
ASSERT(ctx->cc_provider_private != NULL);
aes_ctx = ctx->cc_provider_private;
if (data->cd_format != CRYPTO_DATA_RAW &&
data->cd_format != CRYPTO_DATA_UIO) {
return (CRYPTO_ARGUMENTS_BAD);
}
/*
* There must be no unprocessed ciphertext.
* This happens if the length of the last ciphertext is
* not a multiple of the AES block length.
*/
if (aes_ctx->ac_remainder_len > 0)
return (CRYPTO_ENCRYPTED_DATA_LEN_RANGE);
if (aes_ctx->ac_flags & CCM_MODE) {
/*
* This is where all the plaintext is returned, make sure
* the plaintext buffer is big enough
*/
size_t pt_len = aes_ctx->ac_data_len;
if (data->cd_length < pt_len) {
data->cd_length = pt_len;
return (CRYPTO_BUFFER_TOO_SMALL);
}
ASSERT(aes_ctx->ac_processed_data_len == pt_len);
ASSERT(aes_ctx->ac_processed_mac_len == aes_ctx->ac_mac_len);
saved_offset = data->cd_offset;
saved_length = data->cd_length;
ret = ccm_decrypt_final((ccm_ctx_t *)aes_ctx, data,
AES_BLOCK_LEN, aes_encrypt_block, aes_copy_block,
aes_xor_block);
if (ret == CRYPTO_SUCCESS) {
data->cd_length = data->cd_offset - saved_offset;
} else {
data->cd_length = saved_length;
}
data->cd_offset = saved_offset;
if (ret != CRYPTO_SUCCESS) {
return (ret);
}
} else if (aes_ctx->ac_flags & GCM_MODE) {
/*
* This is where all the plaintext is returned, make sure
* the plaintext buffer is big enough
*/
gcm_ctx_t *ctx = (gcm_ctx_t *)aes_ctx;
size_t pt_len = ctx->gcm_processed_data_len - ctx->gcm_tag_len;
if (data->cd_length < pt_len) {
data->cd_length = pt_len;
return (CRYPTO_BUFFER_TOO_SMALL);
}
saved_offset = data->cd_offset;
saved_length = data->cd_length;
ret = gcm_decrypt_final((gcm_ctx_t *)aes_ctx, data,
AES_BLOCK_LEN, aes_encrypt_block, aes_xor_block);
if (ret == CRYPTO_SUCCESS) {
data->cd_length = data->cd_offset - saved_offset;
} else {
data->cd_length = saved_length;
}
data->cd_offset = saved_offset;
if (ret != CRYPTO_SUCCESS) {
return (ret);
}
}
(void) aes_free_context(ctx);
return (CRYPTO_SUCCESS);
}
static int
aes_encrypt_atomic(crypto_mechanism_t *mechanism,
    crypto_key_t *key, crypto_data_t *plaintext, crypto_data_t *ciphertext,