gh-131316: handle NULL values returned by HACL* functions (#131324)

- Handle NULL returned by allocation functions.
- Handle NULL returned by copy functions.
- Explicitly discard (cast to void) return codes that cannot indicate failure.
This commit is contained in:
Bénédikt Tran 2025-03-17 11:10:39 +01:00 committed by GitHub
parent de8890f5ab
commit 261633bd3f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 312 additions and 145 deletions

View file

@ -395,26 +395,33 @@ new_Blake2Object(PyTypeObject *type)
* 64 bits so we loop in <4gig chunks when needed. */
#if PY_SSIZE_T_MAX > UINT32_MAX
#define HACL_UPDATE_LOOP(update,state,buf,len) \
while (len > UINT32_MAX) { \
update(state, buf, UINT32_MAX); \
len -= UINT32_MAX; \
buf += UINT32_MAX; \
}
# define HACL_UPDATE_LOOP(UPDATE_FUNC, STATE, BUF, LEN) \
do { \
while (LEN > UINT32_MAX) { \
(void)UPDATE_FUNC(STATE, BUF, UINT32_MAX); \
LEN -= UINT32_MAX; \
BUF += UINT32_MAX; \
} \
} while (0)
#else
#define HACL_UPDATE_LOOP(update,state,buf,len)
# define HACL_UPDATE_LOOP(...)
#endif
#define HACL_UPDATE(update,state,buf,len) do { \
/* Note: we explicitly ignore the error code on the basis that it would take >
* 1 billion years to overflow the maximum admissible length for SHA2-256
* (namely, 2^61-1 bytes). */ \
HACL_UPDATE_LOOP(update,state,buf,len) \
/* Cast to uint32_t is safe: len <= UINT32_MAX at this point. */ \
update(state, buf, (uint32_t) len); \
} while (0)
/*
* Note: we explicitly ignore the error code on the basis that it would take
* more than 1 billion years to overflow the maximum admissible length for
* blake2b/2s (2^64 - 1).
*/
#define HACL_UPDATE(UPDATE_FUNC, STATE, BUF, LEN) \
do { \
HACL_UPDATE_LOOP(UPDATE_FUNC, STATE, BUF, LEN); \
/* cast to uint32_t is now safe */ \
(void)UPDATE_FUNC(STATE, BUF, (uint32_t)LEN); \
} while (0)
static void update(Blake2Object *self, uint8_t *buf, Py_ssize_t len) {
static void
update(Blake2Object *self, uint8_t *buf, Py_ssize_t len)
{
switch (self->impl) {
// These need to be ifdef'd out otherwise it's an unresolved symbol at
// link-time.
@ -583,21 +590,41 @@ py_blake2b_or_s_new(PyTypeObject *type, PyObject *data, int digest_size,
switch (self->impl) {
#if HACL_CAN_COMPILE_SIMD256
case Blake2b_256:
case Blake2b_256: {
self->blake2b_256_state = Hacl_Hash_Blake2b_Simd256_malloc_with_params_and_key(&params, last_node, key->buf);
if (self->blake2b_256_state == NULL) {
(void)PyErr_NoMemory();
goto error;
}
break;
}
#endif
#if HACL_CAN_COMPILE_SIMD128
case Blake2s_128:
case Blake2s_128: {
self->blake2s_128_state = Hacl_Hash_Blake2s_Simd128_malloc_with_params_and_key(&params, last_node, key->buf);
if (self->blake2s_128_state == NULL) {
(void)PyErr_NoMemory();
goto error;
}
break;
}
#endif
case Blake2b:
case Blake2b: {
self->blake2b_state = Hacl_Hash_Blake2b_malloc_with_params_and_key(&params, last_node, key->buf);
if (self->blake2b_state == NULL) {
(void)PyErr_NoMemory();
goto error;
}
break;
case Blake2s:
}
case Blake2s: {
self->blake2s_state = Hacl_Hash_Blake2s_malloc_with_params_and_key(&params, last_node, key->buf);
if (self->blake2s_state == NULL) {
(void)PyErr_NoMemory();
goto error;
}
break;
}
default:
Py_UNREACHABLE();
}
@ -610,7 +637,8 @@ py_blake2b_or_s_new(PyTypeObject *type, PyObject *data, int digest_size,
Py_BEGIN_ALLOW_THREADS
update(self, buf.buf, buf.len);
Py_END_ALLOW_THREADS
} else {
}
else {
update(self, buf.buf, buf.len);
}
PyBuffer_Release(&buf);
@ -688,6 +716,54 @@ py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
return py_blake2b_or_s_new(type, data, digest_size, key, salt, person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity);
}
static int
blake2_blake2b_copy_locked(Blake2Object *self, Blake2Object *cpy)
{
    /*
     * Clone the HACL* hash state of 'self' into 'cpy'.
     *
     * The caller is responsible for serializing access to 'self' (e.g.
     * by holding its hashlib lock).  Returns 0 on success.  If the
     * underlying HACL* copy function cannot allocate the new state, a
     * MemoryError is set and -1 is returned.
     */
    assert(cpy != NULL);
    switch (self->impl) {
#if HACL_CAN_COMPILE_SIMD256
        case Blake2b_256:
            cpy->blake2b_256_state =
                Hacl_Hash_Blake2b_Simd256_copy(self->blake2b_256_state);
            if (cpy->blake2b_256_state == NULL) {
                (void)PyErr_NoMemory();
                return -1;
            }
            break;
#endif
#if HACL_CAN_COMPILE_SIMD128
        case Blake2s_128:
            cpy->blake2s_128_state =
                Hacl_Hash_Blake2s_Simd128_copy(self->blake2s_128_state);
            if (cpy->blake2s_128_state == NULL) {
                (void)PyErr_NoMemory();
                return -1;
            }
            break;
#endif
        case Blake2b:
            cpy->blake2b_state = Hacl_Hash_Blake2b_copy(self->blake2b_state);
            if (cpy->blake2b_state == NULL) {
                (void)PyErr_NoMemory();
                return -1;
            }
            break;
        case Blake2s:
            cpy->blake2s_state = Hacl_Hash_Blake2s_copy(self->blake2s_state);
            if (cpy->blake2s_state == NULL) {
                (void)PyErr_NoMemory();
                return -1;
            }
            break;
        default:
            Py_UNREACHABLE();
    }
    // Only mark the copy as initialized once its state is known-good.
    cpy->impl = self->impl;
    return 0;
}
/*[clinic input]
_blake2.blake2b.copy
@ -698,34 +774,20 @@ static PyObject *
_blake2_blake2b_copy_impl(Blake2Object *self)
/*[clinic end generated code: output=622d1c56b91c50d8 input=e383c2d199fd8a2e]*/
{
int rc;
Blake2Object *cpy;
if ((cpy = new_Blake2Object(Py_TYPE(self))) == NULL)
if ((cpy = new_Blake2Object(Py_TYPE(self))) == NULL) {
return NULL;
}
ENTER_HASHLIB(self);
switch (self->impl) {
#if HACL_CAN_COMPILE_SIMD256
case Blake2b_256:
cpy->blake2b_256_state = Hacl_Hash_Blake2b_Simd256_copy(self->blake2b_256_state);
break;
#endif
#if HACL_CAN_COMPILE_SIMD128
case Blake2s_128:
cpy->blake2s_128_state = Hacl_Hash_Blake2s_Simd128_copy(self->blake2s_128_state);
break;
#endif
case Blake2b:
cpy->blake2b_state = Hacl_Hash_Blake2b_copy(self->blake2b_state);
break;
case Blake2s:
cpy->blake2s_state = Hacl_Hash_Blake2s_copy(self->blake2s_state);
break;
default:
Py_UNREACHABLE();
}
cpy->impl = self->impl;
rc = blake2_blake2b_copy_locked(self, cpy);
LEAVE_HASHLIB(self);
if (rc < 0) {
Py_DECREF(cpy);
return NULL;
}
return (PyObject *)cpy;
}

View file

@ -121,12 +121,17 @@ MD5Type_copy_impl(MD5object *self, PyTypeObject *cls)
MD5State *st = PyType_GetModuleState(cls);
MD5object *newobj;
if ((newobj = newMD5object(st))==NULL)
if ((newobj = newMD5object(st)) == NULL) {
return NULL;
}
ENTER_HASHLIB(self);
newobj->hash_state = Hacl_Hash_MD5_copy(self->hash_state);
LEAVE_HASHLIB(self);
if (newobj->hash_state == NULL) {
Py_DECREF(self);
return PyErr_NoMemory();
}
return (PyObject *)newobj;
}
@ -173,15 +178,23 @@ MD5Type_hexdigest_impl(MD5object *self)
return PyUnicode_FromStringAndSize(digest_hex, sizeof(digest_hex));
}
static void update(Hacl_Hash_MD5_state_t *state, uint8_t *buf, Py_ssize_t len) {
static void
update(Hacl_Hash_MD5_state_t *state, uint8_t *buf, Py_ssize_t len)
{
/*
* Note: we explicitly ignore the error code on the basis that it would
* take more than 1 billion years to overflow the maximum admissible length
* for MD5 (2^61 - 1).
*/
#if PY_SSIZE_T_MAX > UINT32_MAX
while (len > UINT32_MAX) {
Hacl_Hash_MD5_update(state, buf, UINT32_MAX);
len -= UINT32_MAX;
buf += UINT32_MAX;
}
while (len > UINT32_MAX) {
(void)Hacl_Hash_MD5_update(state, buf, UINT32_MAX);
len -= UINT32_MAX;
buf += UINT32_MAX;
}
#endif
Hacl_Hash_MD5_update(state, buf, (uint32_t) len);
/* cast to uint32_t is now safe */
(void)Hacl_Hash_MD5_update(state, buf, (uint32_t)len);
}
/*[clinic input]
@ -286,24 +299,27 @@ _md5_md5_impl(PyObject *module, PyObject *string, int usedforsecurity)
MD5object *new;
Py_buffer buf;
if (string)
if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
}
MD5State *st = md5_get_state(module);
if ((new = newMD5object(st)) == NULL) {
if (string)
if (string) {
PyBuffer_Release(&buf);
}
return NULL;
}
new->hash_state = Hacl_Hash_MD5_malloc();
if (PyErr_Occurred()) {
if (new->hash_state == NULL) {
Py_DECREF(new);
if (string)
if (string) {
PyBuffer_Release(&buf);
return NULL;
}
return PyErr_NoMemory();
}
if (string) {
if (buf.len >= HASHLIB_GIL_MINSIZE) {
/* We do not initialize self->lock here as this is the constructor
@ -311,7 +327,8 @@ _md5_md5_impl(PyObject *module, PyObject *string, int usedforsecurity)
Py_BEGIN_ALLOW_THREADS
update(new->hash_state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
} else {
}
else {
update(new->hash_state, buf.buf, buf.len);
}
PyBuffer_Release(&buf);

View file

@ -98,7 +98,10 @@ static void
SHA1_dealloc(PyObject *op)
{
SHA1object *ptr = _SHA1object_CAST(op);
Hacl_Hash_SHA1_free(ptr->hash_state);
if (ptr->hash_state != NULL) {
Hacl_Hash_SHA1_free(ptr->hash_state);
ptr->hash_state = NULL;
}
PyTypeObject *tp = Py_TYPE(ptr);
PyObject_GC_UnTrack(ptr);
PyObject_GC_Del(ptr);
@ -123,12 +126,17 @@ SHA1Type_copy_impl(SHA1object *self, PyTypeObject *cls)
SHA1State *st = _PyType_GetModuleState(cls);
SHA1object *newobj;
if ((newobj = newSHA1object(st)) == NULL)
if ((newobj = newSHA1object(st)) == NULL) {
return NULL;
}
ENTER_HASHLIB(self);
newobj->hash_state = Hacl_Hash_SHA1_copy(self->hash_state);
LEAVE_HASHLIB(self);
if (newobj->hash_state == NULL) {
Py_DECREF(newobj);
return PyErr_NoMemory();
}
return (PyObject *)newobj;
}
@ -166,15 +174,23 @@ SHA1Type_hexdigest_impl(SHA1object *self)
return _Py_strhex((const char *)digest, SHA1_DIGESTSIZE);
}
static void update(Hacl_Hash_SHA1_state_t *state, uint8_t *buf, Py_ssize_t len) {
static void
update(Hacl_Hash_SHA1_state_t *state, uint8_t *buf, Py_ssize_t len)
{
/*
* Note: we explicitly ignore the error code on the basis that it would
* take more than 1 billion years to overflow the maximum admissible length
* for SHA-1 (2^61 - 1).
*/
#if PY_SSIZE_T_MAX > UINT32_MAX
while (len > UINT32_MAX) {
Hacl_Hash_SHA1_update(state, buf, UINT32_MAX);
len -= UINT32_MAX;
buf += UINT32_MAX;
}
while (len > UINT32_MAX) {
(void)Hacl_Hash_SHA1_update(state, buf, UINT32_MAX);
len -= UINT32_MAX;
buf += UINT32_MAX;
}
#endif
Hacl_Hash_SHA1_update(state, buf, (uint32_t) len);
/* cast to uint32_t is now safe */
(void)Hacl_Hash_SHA1_update(state, buf, (uint32_t)len);
}
/*[clinic input]
@ -279,23 +295,26 @@ _sha1_sha1_impl(PyObject *module, PyObject *string, int usedforsecurity)
SHA1object *new;
Py_buffer buf;
if (string)
if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
}
SHA1State *st = sha1_get_state(module);
if ((new = newSHA1object(st)) == NULL) {
if (string)
if (string) {
PyBuffer_Release(&buf);
}
return NULL;
}
new->hash_state = Hacl_Hash_SHA1_malloc();
if (PyErr_Occurred()) {
if (new->hash_state == NULL) {
Py_DECREF(new);
if (string)
if (string) {
PyBuffer_Release(&buf);
return NULL;
}
return PyErr_NoMemory();
}
if (string) {
if (buf.len >= HASHLIB_GIL_MINSIZE) {
@ -304,7 +323,8 @@ _sha1_sha1_impl(PyObject *module, PyObject *string, int usedforsecurity)
Py_BEGIN_ALLOW_THREADS
update(new->hash_state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
} else {
}
else {
update(new->hash_state, buf.buf, buf.len);
}
PyBuffer_Release(&buf);

View file

@ -89,16 +89,28 @@ sha2_get_state(PyObject *module)
return (sha2_state *)state;
}
static void SHA256copy(SHA256object *src, SHA256object *dest)
static int
SHA256copy(SHA256object *src, SHA256object *dest)
{
dest->digestsize = src->digestsize;
dest->state = Hacl_Hash_SHA2_copy_256(src->state);
if (dest->state == NULL) {
(void)PyErr_NoMemory();
return -1;
}
return 0;
}
static void SHA512copy(SHA512object *src, SHA512object *dest)
static int
SHA512copy(SHA512object *src, SHA512object *dest)
{
dest->digestsize = src->digestsize;
dest->state = Hacl_Hash_SHA2_copy_512(src->state);
if (dest->state == NULL) {
(void)PyErr_NoMemory();
return -1;
}
return 0;
}
static SHA256object *
@ -166,7 +178,10 @@ static void
SHA256_dealloc(PyObject *op)
{
SHA256object *ptr = _SHA256object_CAST(op);
Hacl_Hash_SHA2_free_256(ptr->state);
if (ptr->state != NULL) {
Hacl_Hash_SHA2_free_256(ptr->state);
ptr->state = NULL;
}
PyTypeObject *tp = Py_TYPE(ptr);
PyObject_GC_UnTrack(ptr);
PyObject_GC_Del(ptr);
@ -177,7 +192,10 @@ static void
SHA512_dealloc(PyObject *op)
{
SHA512object *ptr = _SHA512object_CAST(op);
Hacl_Hash_SHA2_free_512(ptr->state);
if (ptr->state != NULL) {
Hacl_Hash_SHA2_free_512(ptr->state);
ptr->state = NULL;
}
PyTypeObject *tp = Py_TYPE(ptr);
PyObject_GC_UnTrack(ptr);
PyObject_GC_Del(ptr);
@ -187,34 +205,42 @@ SHA512_dealloc(PyObject *op)
/* HACL* takes a uint32_t for the length of its parameter, but Py_ssize_t can be
* 64 bits so we loop in <4gig chunks when needed. */
static void update_256(Hacl_Hash_SHA2_state_t_256 *state, uint8_t *buf, Py_ssize_t len) {
/* Note: we explicitly ignore the error code on the basis that it would take >
* 1 billion years to overflow the maximum admissible length for SHA2-256
* (namely, 2^61-1 bytes). */
static void
update_256(Hacl_Hash_SHA2_state_t_256 *state, uint8_t *buf, Py_ssize_t len)
{
/*
* Note: we explicitly ignore the error code on the basis that it would
* take more than 1 billion years to overflow the maximum admissible length
* for SHA-2-256 (2^61 - 1).
*/
#if PY_SSIZE_T_MAX > UINT32_MAX
while (len > UINT32_MAX) {
Hacl_Hash_SHA2_update_256(state, buf, UINT32_MAX);
len -= UINT32_MAX;
buf += UINT32_MAX;
}
while (len > UINT32_MAX) {
(void)Hacl_Hash_SHA2_update_256(state, buf, UINT32_MAX);
len -= UINT32_MAX;
buf += UINT32_MAX;
}
#endif
/* Cast to uint32_t is safe: len <= UINT32_MAX at this point. */
Hacl_Hash_SHA2_update_256(state, buf, (uint32_t) len);
/* cast to uint32_t is now safe */
(void)Hacl_Hash_SHA2_update_256(state, buf, (uint32_t)len);
}
static void update_512(Hacl_Hash_SHA2_state_t_512 *state, uint8_t *buf, Py_ssize_t len) {
/* Note: we explicitly ignore the error code on the basis that it would take >
* 1 billion years to overflow the maximum admissible length for this API
* (namely, 2^64-1 bytes). */
static void
update_512(Hacl_Hash_SHA2_state_t_512 *state, uint8_t *buf, Py_ssize_t len)
{
/*
* Note: we explicitly ignore the error code on the basis that it would
* take more than 1 billion years to overflow the maximum admissible length
* for SHA-2-512 (2^64 - 1).
*/
#if PY_SSIZE_T_MAX > UINT32_MAX
while (len > UINT32_MAX) {
Hacl_Hash_SHA2_update_512(state, buf, UINT32_MAX);
len -= UINT32_MAX;
buf += UINT32_MAX;
}
while (len > UINT32_MAX) {
(void)Hacl_Hash_SHA2_update_512(state, buf, UINT32_MAX);
len -= UINT32_MAX;
buf += UINT32_MAX;
}
#endif
/* Cast to uint32_t is safe: len <= UINT32_MAX at this point. */
Hacl_Hash_SHA2_update_512(state, buf, (uint32_t) len);
/* cast to uint32_t is now safe */
(void)Hacl_Hash_SHA2_update_512(state, buf, (uint32_t)len);
}
@ -232,21 +258,27 @@ static PyObject *
SHA256Type_copy_impl(SHA256object *self, PyTypeObject *cls)
/*[clinic end generated code: output=fabd515577805cd3 input=3137146fcb88e212]*/
{
int rc;
SHA256object *newobj;
sha2_state *state = _PyType_GetModuleState(cls);
if (Py_IS_TYPE(self, state->sha256_type)) {
if ((newobj = newSHA256object(state)) == NULL) {
return NULL;
}
} else {
}
else {
if ((newobj = newSHA224object(state)) == NULL) {
return NULL;
}
}
ENTER_HASHLIB(self);
SHA256copy(self, newobj);
rc = SHA256copy(self, newobj);
LEAVE_HASHLIB(self);
if (rc < 0) {
Py_DECREF(newobj);
return NULL;
}
return (PyObject *)newobj;
}
@ -262,6 +294,7 @@ static PyObject *
SHA512Type_copy_impl(SHA512object *self, PyTypeObject *cls)
/*[clinic end generated code: output=66d2a8ef20de8302 input=f673a18f66527c90]*/
{
int rc;
SHA512object *newobj;
sha2_state *state = _PyType_GetModuleState(cls);
@ -277,8 +310,12 @@ SHA512Type_copy_impl(SHA512object *self, PyTypeObject *cls)
}
ENTER_HASHLIB(self);
SHA512copy(self, newobj);
rc = SHA512copy(self, newobj);
LEAVE_HASHLIB(self);
if (rc < 0) {
Py_DECREF(newobj);
return NULL;
}
return (PyObject *)newobj;
}
@ -587,12 +624,12 @@ _sha2_sha256_impl(PyObject *module, PyObject *string, int usedforsecurity)
new->state = Hacl_Hash_SHA2_malloc_256();
new->digestsize = 32;
if (PyErr_Occurred()) {
if (new->state == NULL) {
Py_DECREF(new);
if (string) {
PyBuffer_Release(&buf);
}
return NULL;
return PyErr_NoMemory();
}
if (string) {
if (buf.len >= HASHLIB_GIL_MINSIZE) {
@ -601,7 +638,8 @@ _sha2_sha256_impl(PyObject *module, PyObject *string, int usedforsecurity)
Py_BEGIN_ALLOW_THREADS
update_256(new->state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
} else {
}
else {
update_256(new->state, buf.buf, buf.len);
}
PyBuffer_Release(&buf);
@ -641,12 +679,12 @@ _sha2_sha224_impl(PyObject *module, PyObject *string, int usedforsecurity)
new->state = Hacl_Hash_SHA2_malloc_224();
new->digestsize = 28;
if (PyErr_Occurred()) {
if (new->state == NULL) {
Py_DECREF(new);
if (string) {
PyBuffer_Release(&buf);
}
return NULL;
return PyErr_NoMemory();
}
if (string) {
if (buf.len >= HASHLIB_GIL_MINSIZE) {
@ -655,7 +693,8 @@ _sha2_sha224_impl(PyObject *module, PyObject *string, int usedforsecurity)
Py_BEGIN_ALLOW_THREADS
update_256(new->state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
} else {
}
else {
update_256(new->state, buf.buf, buf.len);
}
PyBuffer_Release(&buf);
@ -683,23 +722,26 @@ _sha2_sha512_impl(PyObject *module, PyObject *string, int usedforsecurity)
sha2_state *state = sha2_get_state(module);
if (string)
if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
}
if ((new = newSHA512object(state)) == NULL) {
if (string)
if (string) {
PyBuffer_Release(&buf);
}
return NULL;
}
new->state = Hacl_Hash_SHA2_malloc_512();
new->digestsize = 64;
if (PyErr_Occurred()) {
if (new->state == NULL) {
Py_DECREF(new);
if (string)
if (string) {
PyBuffer_Release(&buf);
return NULL;
}
return PyErr_NoMemory();
}
if (string) {
if (buf.len >= HASHLIB_GIL_MINSIZE) {
@ -708,7 +750,8 @@ _sha2_sha512_impl(PyObject *module, PyObject *string, int usedforsecurity)
Py_BEGIN_ALLOW_THREADS
update_512(new->state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
} else {
}
else {
update_512(new->state, buf.buf, buf.len);
}
PyBuffer_Release(&buf);
@ -736,23 +779,26 @@ _sha2_sha384_impl(PyObject *module, PyObject *string, int usedforsecurity)
sha2_state *state = sha2_get_state(module);
if (string)
if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
}
if ((new = newSHA384object(state)) == NULL) {
if (string)
if (string) {
PyBuffer_Release(&buf);
}
return NULL;
}
new->state = Hacl_Hash_SHA2_malloc_384();
new->digestsize = 48;
if (PyErr_Occurred()) {
if (new->state == NULL) {
Py_DECREF(new);
if (string)
if (string) {
PyBuffer_Release(&buf);
return NULL;
}
return PyErr_NoMemory();
}
if (string) {
if (buf.len >= HASHLIB_GIL_MINSIZE) {
@ -761,7 +807,8 @@ _sha2_sha384_impl(PyObject *module, PyObject *string, int usedforsecurity)
Py_BEGIN_ALLOW_THREADS
update_512(new->state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
} else {
}
else {
update_512(new->state, buf.buf, buf.len);
}
PyBuffer_Release(&buf);

View file

@ -83,18 +83,23 @@ newSHA3object(PyTypeObject *type)
return newobj;
}
static void sha3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *buf, Py_ssize_t len) {
/* Note: we explicitly ignore the error code on the basis that it would take >
* 1 billion years to hash more than 2^64 bytes. */
static void
sha3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *buf, Py_ssize_t len)
{
/*
* Note: we explicitly ignore the error code on the basis that it would
* take more than 1 billion years to overflow the maximum admissible length
* for SHA-3 (2^64 - 1).
*/
#if PY_SSIZE_T_MAX > UINT32_MAX
while (len > UINT32_MAX) {
Hacl_Hash_SHA3_update(state, buf, UINT32_MAX);
len -= UINT32_MAX;
buf += UINT32_MAX;
}
while (len > UINT32_MAX) {
(void)Hacl_Hash_SHA3_update(state, buf, UINT32_MAX);
len -= UINT32_MAX;
buf += UINT32_MAX;
}
#endif
/* Cast to uint32_t is safe: len <= UINT32_MAX at this point. */
Hacl_Hash_SHA3_update(state, buf, (uint32_t) len);
/* cast to uint32_t is now safe */
(void)Hacl_Hash_SHA3_update(state, buf, (uint32_t)len);
}
/*[clinic input]
@ -123,21 +128,32 @@ py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity)
if (type == state->sha3_224_type) {
self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_SHA3_224);
} else if (type == state->sha3_256_type) {
}
else if (type == state->sha3_256_type) {
self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_SHA3_256);
} else if (type == state->sha3_384_type) {
}
else if (type == state->sha3_384_type) {
self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_SHA3_384);
} else if (type == state->sha3_512_type) {
}
else if (type == state->sha3_512_type) {
self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_SHA3_512);
} else if (type == state->shake_128_type) {
}
else if (type == state->shake_128_type) {
self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_Shake128);
} else if (type == state->shake_256_type) {
}
else if (type == state->shake_256_type) {
self->hash_state = Hacl_Hash_SHA3_malloc(Spec_Hash_Definitions_Shake256);
} else {
}
else {
PyErr_BadInternalCall();
goto error;
}
if (self->hash_state == NULL) {
(void)PyErr_NoMemory();
goto error;
}
if (data) {
GET_BUFFER_VIEW_OR_ERROR(data, &buf, goto error);
if (buf.len >= HASHLIB_GIL_MINSIZE) {
@ -146,7 +162,8 @@ py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity)
Py_BEGIN_ALLOW_THREADS
sha3_update(self->hash_state, buf.buf, buf.len);
Py_END_ALLOW_THREADS
} else {
}
else {
sha3_update(self->hash_state, buf.buf, buf.len);
}
}
@ -155,7 +172,7 @@ py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity)
return (PyObject *)self;
error:
error:
if (self) {
Py_DECREF(self);
}
@ -217,6 +234,10 @@ _sha3_sha3_224_copy_impl(SHA3object *self)
ENTER_HASHLIB(self);
newobj->hash_state = Hacl_Hash_SHA3_copy(self->hash_state);
LEAVE_HASHLIB(self);
if (newobj->hash_state == NULL) {
Py_DECREF(newobj);
return PyErr_NoMemory();
}
return (PyObject *)newobj;
}
@ -232,10 +253,10 @@ _sha3_sha3_224_digest_impl(SHA3object *self)
/*[clinic end generated code: output=fd531842e20b2d5b input=5b2a659536bbd248]*/
{
unsigned char digest[SHA3_MAX_DIGESTSIZE];
// This function errors out if the algorithm is Shake. Here, we know this
// This function errors out if the algorithm is SHAKE. Here, we know this
// not to be the case, and therefore do not perform error checking.
ENTER_HASHLIB(self);
Hacl_Hash_SHA3_digest(self->hash_state, digest);
(void)Hacl_Hash_SHA3_digest(self->hash_state, digest);
LEAVE_HASHLIB(self);
return PyBytes_FromStringAndSize((const char *)digest,
Hacl_Hash_SHA3_hash_len(self->hash_state));
@ -254,7 +275,7 @@ _sha3_sha3_224_hexdigest_impl(SHA3object *self)
{
unsigned char digest[SHA3_MAX_DIGESTSIZE];
ENTER_HASHLIB(self);
Hacl_Hash_SHA3_digest(self->hash_state, digest);
(void)Hacl_Hash_SHA3_digest(self->hash_state, digest);
LEAVE_HASHLIB(self);
return _Py_strhex((const char *)digest,
Hacl_Hash_SHA3_hash_len(self->hash_state));
@ -465,13 +486,13 @@ _SHAKE_digest(PyObject *op, unsigned long digestlen, int hex)
* - the output length is zero -- we follow the existing behavior and return
* an empty digest, without raising an error */
if (digestlen > 0) {
Hacl_Hash_SHA3_squeeze(self->hash_state, digest, digestlen);
(void)Hacl_Hash_SHA3_squeeze(self->hash_state, digest, digestlen);
}
if (hex) {
result = _Py_strhex((const char *)digest, digestlen);
} else {
result = PyBytes_FromStringAndSize((const char *)digest,
digestlen);
}
else {
result = PyBytes_FromStringAndSize((const char *)digest, digestlen);
}
PyMem_Free(digest);
return result;