tls: code shrink

function                                             old     new   delta
xwrite_encrypted                                     599     585     -14

Signed-off-by: Denys Vlasenko <vda.linux@googlemail.com>
This commit is contained in:
Denys Vlasenko 2018-11-23 18:48:20 +01:00
parent ecc9090cfc
commit 219c9d4b5d
2 changed files with 21 additions and 15 deletions

View File

@@ -236,6 +236,7 @@ typedef uint64_t bb__aliased_uint64_t FIX_ALIASING;
# define move_from_unaligned32(v, u32p) ((v) = *(bb__aliased_uint32_t*)(u32p))
# define move_to_unaligned16(u16p, v) (*(bb__aliased_uint16_t*)(u16p) = (v))
# define move_to_unaligned32(u32p, v) (*(bb__aliased_uint32_t*)(u32p) = (v))
# define move_to_unaligned64(u64p, v) (*(bb__aliased_uint64_t*)(u64p) = (v))
/* #elif ... - add your favorite arch today! */
#else
# define BB_UNALIGNED_MEMACCESS_OK 0
@@ -252,6 +253,10 @@ typedef uint64_t bb__aliased_uint64_t FIX_ALIASING;
uint32_t __t = (v); \
memcpy((u32p), &__t, 4); \
} while (0)
# define move_to_unaligned64(u64p, v) do { \
uint64_t __t = (v); \
memcpy((u64p), &__t, 8); \
} while (0)
#endif
/* Unaligned, fixed-endian accessors */

View File

@@ -788,14 +788,15 @@ static void xwrite_encrypted_aesgcm(tls_state_t *tls, unsigned size, unsigned type)
{
#define COUNTER(v) (*(uint32_t*)(v + 12))
uint8_t aad[13 + 3]; /* +3 creates [16] buffer, simplifying GHASH() */
uint8_t nonce[12 + 4]; /* +4 creates space for AES block counter */
uint8_t scratch[AES_BLOCK_SIZE]; //[16]
uint8_t authtag[AES_BLOCK_SIZE]; //[16]
uint8_t aad[13 + 3] ALIGNED(4); /* +3 creates [16] buffer, simplifying GHASH() */
uint8_t nonce[12 + 4] ALIGNED(4); /* +4 creates space for AES block counter */
uint8_t scratch[AES_BLOCK_SIZE] ALIGNED(4); //[16]
uint8_t authtag[AES_BLOCK_SIZE] ALIGNED(4); //[16]
uint8_t *buf;
struct record_hdr *xhdr;
unsigned remaining;
unsigned cnt;
uint64_t t64;
buf = tls->outbuf + OUTBUF_PFX; /* see above for the byte it points to */
dump_hex("xwrite_encrypted_aesgcm plaintext:%s\n", buf, size);
@@ -810,13 +811,13 @@ static void xwrite_encrypted_aesgcm(tls_state_t *tls, unsigned size, unsigned type)
/* set aad[12], and clear aad[13..15] */
COUNTER(aad) = SWAP_LE32(size & 0xff);
memcpy(nonce, tls->client_write_IV, 4);
memcpy(nonce + 4, &tls->write_seq64_be, 8);
memcpy(aad, &tls->write_seq64_be, 8);
memcpy(buf - 8, &tls->write_seq64_be, 8);
//optimize
memcpy(nonce, tls->client_write_IV, 4);
t64 = tls->write_seq64_be;
move_to_unaligned64(nonce + 4, t64);
move_to_unaligned64(aad, t64);
move_to_unaligned64(buf - 8, t64);
/* seq64 is not used later in this func, can increment here */
tls->write_seq64_be = SWAP_BE64(1 + SWAP_BE64(tls->write_seq64_be));
tls->write_seq64_be = SWAP_BE64(1 + SWAP_BE64(t64));
cnt = 1;
remaining = size;
@@ -923,13 +924,14 @@ static void tls_aesgcm_decrypt(tls_state_t *tls, uint8_t *buf, int size)
{
#define COUNTER(v) (*(uint32_t*)(v + 12))
//uint8_t aad[13 + 3]; /* +3 creates [16] buffer, simplifying GHASH() */
uint8_t nonce[12 + 4]; /* +4 creates space for AES block counter */
uint8_t scratch[AES_BLOCK_SIZE]; //[16]
//uint8_t authtag[AES_BLOCK_SIZE]; //[16]
//uint8_t aad[13 + 3] ALIGNED(4); /* +3 creates [16] buffer, simplifying GHASH() */
uint8_t nonce[12 + 4] ALIGNED(4); /* +4 creates space for AES block counter */
uint8_t scratch[AES_BLOCK_SIZE] ALIGNED(4); //[16]
//uint8_t authtag[AES_BLOCK_SIZE] ALIGNED(4); //[16]
unsigned remaining;
unsigned cnt;
//memcpy(aad, buf, 8);
//aad[8] = type;
//aad[9] = TLS_MAJ;
//aad[10] = TLS_MIN;
@@ -937,7 +939,6 @@ static void tls_aesgcm_decrypt(tls_state_t *tls, uint8_t *buf, int size)
///* set aad[12], and clear aad[13..15] */
//COUNTER(aad) = SWAP_LE32(size & 0xff);
//memcpy(aad, &tls->write_seq64_be, 8);
memcpy(nonce, tls->server_write_IV, 4);
memcpy(nonce + 4, buf, 8);
buf += 8;