42 void (*gcm_gmult_accel)(uint64_t Xi[2],
const uint64_t *Htable);
48 uint8_t base_ectr[16];
51 uint8_t rem_aad_inserted : 1;
54struct lc_aes_gcm_cryptor {
55 struct lc_gcm_ctx gcm_ctx;
56 struct lc_sym_ctx sym_ctx;
/*
 * Fixed part of the AES-GCM context: the generic AEAD wrapper plus the
 * AES-GCM cryptor structure. Does not include the underlying symmetric
 * cipher state — see LC_AES_GCM_CTX_SIZE for the full allocation size.
 */
#define LC_AES_GCM_CTX_COMMON_SIZE \
	(sizeof(struct lc_aead) + sizeof(struct lc_aes_gcm_cryptor))
/* State size of the underlying symmetric cipher implementation x. */
#define LC_AES_GCM_STATE_SIZE(x) (LC_SYM_STATE_SIZE(x))
/*
 * Total memory footprint of one AES-GCM context backed by the lc_aes
 * symmetric cipher implementation (common part + cipher state).
 */
#define LC_AES_GCM_CTX_SIZE \
	(LC_AES_GCM_CTX_COMMON_SIZE + LC_AES_GCM_STATE_SIZE(lc_aes))
/* Symmetric cipher state size when the state length len is given explicitly. */
#define LC_AES_GCM_STATE_SIZE_LEN(len) (LC_SYM_STATE_SIZE_LEN(len))
/*
 * Total AES-GCM context size when the cipher state length len is given
 * explicitly instead of being derived from a compile-time implementation.
 */
#define LC_AES_GCM_CTX_SIZE_LEN(len) \
	(LC_AES_GCM_CTX_COMMON_SIZE + LC_AES_GCM_STATE_SIZE_LEN(len))
/* AEAD algorithm descriptor for AES-GCM; defined in the implementation file. */
extern const struct lc_aead *lc_aes_gcm_aead;
/*
 * Internal helper: initialize the symmetric cipher context embedded in the
 * AES-GCM cryptor pointed to by name. The cipher state is placed immediately
 * after the struct lc_aes_gcm_cryptor header, hence the sizeof offset.
 */
#define _LC_AES_GCM_SET_CTX(name) \
	_LC_SYM_SET_CTX((&name->sym_ctx), lc_aes, name, \
			(sizeof(struct lc_aes_gcm_cryptor)))
/*
 * Initialize the AEAD context name for AES-GCM: install the AES-GCM AEAD
 * descriptor, then wire up the cryptor embedded in the context's state
 * buffer. Intentionally not wrapped in do { } while (0): the expansion of
 * the nested macros must stay in the caller's declaration scope.
 */
#define LC_AES_GCM_SET_CTX(name) \
	LC_AEAD_CTX(name, lc_aes_gcm_aead); \
	_LC_AES_GCM_SET_CTX(((struct lc_aes_gcm_cryptor *)name->aead_state))
119 size_t fixed_field_len, uint8_t *iv,
size_t ivlen,
/*
 * Allocate an AES-GCM AEAD context on the stack.
 *
 * Declares an aligned buffer of LC_AES_GCM_CTX_SIZE bytes, makes name point
 * at it as a struct lc_aead_ctx, initializes it for AES-GCM via
 * LC_AES_GCM_SET_CTX() and calls lc_aead_zero() on it. The _Pragma pairs
 * suppress -Wvla and -Wdeclaration-after-statement, which this in-macro
 * declaration pattern would otherwise trigger at the expansion site.
 */
#define LC_AES_GCM_CTX_ON_STACK(name) \
	_Pragma("GCC diagnostic push") \
	_Pragma("GCC diagnostic ignored \"-Wvla\"") _Pragma( \
		"GCC diagnostic ignored \"-Wdeclaration-after-statement\"") \
	LC_ALIGNED_BUFFER(name##_ctx_buf, LC_AES_GCM_CTX_SIZE, \
			  LC_MEM_COMMON_ALIGNMENT); \
	struct lc_aead_ctx *name = (struct lc_aead_ctx *)name##_ctx_buf; \
	LC_AES_GCM_SET_CTX(name); \
	lc_aead_zero(name); \
	_Pragma("GCC diagnostic pop")
@ lc_aes_gcm_generate_iv
int lc_aes_gcm_generate_iv(struct lc_aead_ctx *ctx, const uint8_t *fixed_field, size_t fixed_field_len, uint8_t *iv, size_t ivlen, enum lc_aes_gcm_iv_type type)
Generate an IV, set it in the GCM state, and return it to the caller.
int lc_aes_gcm_alloc(struct lc_aead_ctx **ctx)
Allocate the AES-GCM cryptor context on the heap.