FD.io VPP  v20.05-21-gb1500e9ff
Vector Packet Processing
quic_crypto.c
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2019 Cisco and/or its affiliates.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at:
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 #include <vnet/crypto/crypto.h>
16 #include <vppinfra/lock.h>
17 
18 #include <quic/quic.h>
19 #include <quic/quic_crypto.h>
20 
21 #include <quicly.h>
22 #include <picotls/openssl.h>
23 
24 #define QUICLY_EPOCH_1RTT 3
25 
26 extern quic_main_t quic_main;
27 extern quic_ctx_t *quic_get_conn_ctx (quicly_conn_t * conn);
28 
/* picotls cipher transform callback type: (ctx, output, input, len). */
typedef void (*quicly_do_transform_fn) (ptls_cipher_context_t *, void *,
					const void *, size_t);
31 
33 {
34  ptls_cipher_context_t super;
37 };
38 
40 {
41  ptls_aead_context_t super;
44 };
45 
46 static size_t
48  ptls_aead_context_t * _ctx, void *_output,
49  const void *input, size_t inlen,
50  uint64_t decrypted_pn, const void *aad,
51  size_t aadlen);
52 
54 
55 void
57 {
59 
60  if (batch_ctx->nb_tx_packets <= 0)
61  return;
62 
65  batch_ctx->nb_tx_packets);
67 
68  for (int i = 0; i < batch_ctx->nb_tx_packets; i++)
70 
71  batch_ctx->nb_tx_packets = 0;
72 }
73 
74 void
76 {
78 
79  if (batch_ctx->nb_rx_packets <= 0)
80  return;
81 
84  batch_ctx->nb_rx_packets);
86 
87  for (int i = 0; i < batch_ctx->nb_rx_packets; i++)
89 
90  batch_ctx->nb_rx_packets = 0;
91 }
92 
93 void
94 build_iv (ptls_aead_context_t * ctx, uint8_t * iv, uint64_t seq)
95 {
96  size_t iv_size = ctx->algo->iv_size, i;
97  const uint8_t *s = ctx->static_iv;
98  uint8_t *d = iv;
99  /* build iv */
100  for (i = iv_size - 8; i != 0; --i)
101  *d++ = *s++;
102  i = 64;
103  do
104  {
105  i -= 8;
106  *d++ = *s++ ^ (uint8_t) (seq >> i);
107  }
108  while (i != 0);
109 }
110 
/*
 * Apply QUIC header protection to an already-encrypted datagram: sample
 * the ciphertext just past the packet-number field to generate a mask,
 * then XOR the protected bits of the first byte and the packet-number
 * bytes.
 */
static void
do_finalize_send_packet (ptls_cipher_context_t * hp,
			 quicly_datagram_t * packet,
			 size_t first_byte_at, size_t payload_from)
{
  uint8_t hpmask[1 + QUICLY_SEND_PN_SIZE] = {
    0
  };
  size_t i;

  /* sample offset: start of PN field plus the maximum PN length */
  ptls_cipher_init (hp,
		    packet->data.base + payload_from - QUICLY_SEND_PN_SIZE +
		    QUICLY_MAX_PN_SIZE);
  /* encrypting zeros yields the keystream, i.e. the protection mask */
  ptls_cipher_encrypt (hp, hpmask, hpmask, sizeof (hpmask));

  /* long headers mask 4 low bits of byte 0, short headers mask 5 */
  packet->data.base[first_byte_at] ^=
    hpmask[0] &
    (QUICLY_PACKET_IS_LONG_HEADER (packet->data.base[first_byte_at]) ? 0xf :
     0x1f);

  /* mask the packet-number bytes immediately preceding the payload */
  for (i = 0; i != QUICLY_SEND_PN_SIZE; ++i)
    packet->data.base[payload_from + i - QUICLY_SEND_PN_SIZE] ^=
      hpmask[i + 1];
}
135 
136 void
137 quic_crypto_finalize_send_packet (quicly_datagram_t * packet)
138 {
139  quic_encrypt_cb_ctx *encrypt_cb_ctx =
140  (quic_encrypt_cb_ctx *) ((uint8_t *) packet + sizeof (*packet));
141 
142  for (int i = 0; i < encrypt_cb_ctx->snd_ctx_count; i++)
143  {
144  do_finalize_send_packet (encrypt_cb_ctx->snd_ctx[i].hp,
145  packet,
146  encrypt_cb_ctx->snd_ctx[i].first_byte_at,
147  encrypt_cb_ctx->snd_ctx[i].payload_from);
148  }
149  encrypt_cb_ctx->snd_ctx_count = 0;
150 }
151 
/*
 * quicly crypto-engine hook: derive the header-protection cipher and
 * AEAD contexts for one epoch/direction from the TLS secret.  For 1-RTT
 * ingress (!is_enc) keys, the new contexts are also cached on the quic
 * ctx so vpp can decrypt short-header packets itself (see
 * quic_crypto_decrypt_packet) and track key-phase updates.
 * Returns 0 on success or a PTLS_ERROR_* code.
 */
static int
quic_crypto_setup_cipher (quicly_crypto_engine_t * engine,
			  quicly_conn_t * conn, size_t epoch, int is_enc,
			  ptls_cipher_context_t ** hp_ctx,
			  ptls_aead_context_t ** aead_ctx,
			  ptls_aead_algorithm_t * aead,
			  ptls_hash_algorithm_t * hash, const void *secret)
{
  uint8_t hpkey[PTLS_MAX_SECRET_SIZE];
  int ret;

  *aead_ctx = NULL;

  /* generate new header protection key */
  if (hp_ctx != NULL)
    {
      *hp_ctx = NULL;
      /* hp key = HKDF-Expand-Label (secret, "quic hp") */
      if ((ret =
	   ptls_hkdf_expand_label (hash, hpkey, aead->ctr_cipher->key_size,
				   ptls_iovec_init (secret,
						    hash->digest_size),
				   "quic hp", ptls_iovec_init (NULL, 0),
				   NULL)) != 0)
	goto Exit;
      if ((*hp_ctx =
	   ptls_cipher_new (aead->ctr_cipher, is_enc, hpkey)) == NULL)
	{
	  ret = PTLS_ERROR_NO_MEMORY;
	  goto Exit;
	}
    }

  /* generate new AEAD context */
  if ((*aead_ctx =
       ptls_aead_new (aead, hash, is_enc, secret,
		      QUICLY_AEAD_BASE_LABEL)) == NULL)
    {
      ret = PTLS_ERROR_NO_MEMORY;
      goto Exit;
    }

  if (epoch == QUICLY_EPOCH_1RTT && !is_enc)
    {
      quic_ctx_t *qctx = quic_get_conn_ctx (conn);
      /* a second install of 1-RTT rx keys means a key update happened */
      if (qctx->ingress_keys.aead_ctx != NULL)
	{
	  qctx->key_phase_ingress++;
	}

      qctx->ingress_keys.aead_ctx = *aead_ctx;
      if (hp_ctx != NULL)
	qctx->ingress_keys.hp_ctx = *hp_ctx;
    }

  ret = 0;

Exit:
  /* on failure, roll back whichever contexts were created */
  if (ret != 0)
    {
      if (aead_ctx && *aead_ctx != NULL)
	{
	  ptls_aead_free (*aead_ctx);
	  *aead_ctx = NULL;
	}
      if (hp_ctx && *hp_ctx != NULL)
	{
	  ptls_cipher_free (*hp_ctx);
	  *hp_ctx = NULL;
	}
    }
  /* never leave key material on the stack */
  ptls_clear_memory (hpkey, sizeof (hpkey));
  return ret;
}
225 
226 void
227 quic_crypto_finalize_send_packet_cb (struct st_quicly_crypto_engine_t
228  *engine, quicly_conn_t * conn,
229  ptls_cipher_context_t * hp,
230  ptls_aead_context_t * aead,
231  quicly_datagram_t * packet,
232  size_t first_byte_at,
233  size_t payload_from, int coalesced)
234 {
235  quic_encrypt_cb_ctx *encrypt_cb_ctx =
236  (quic_encrypt_cb_ctx *) ((uint8_t *) packet + sizeof (*packet));
237 
238  encrypt_cb_ctx->snd_ctx[encrypt_cb_ctx->snd_ctx_count].hp = hp;
239  encrypt_cb_ctx->snd_ctx[encrypt_cb_ctx->snd_ctx_count].first_byte_at =
240  first_byte_at;
241  encrypt_cb_ctx->snd_ctx[encrypt_cb_ctx->snd_ctx_count].payload_from =
242  payload_from;
243  encrypt_cb_ctx->snd_ctx_count++;
244 }
245 
246 void
248 {
249  ptls_cipher_context_t *header_protection = NULL;
250  ptls_aead_context_t *aead = NULL;
251  int pn;
252 
253  /* Long Header packets are not decrypted by vpp */
254  if (QUICLY_PACKET_IS_LONG_HEADER (pctx->packet.octets.base[0]))
255  return;
256 
257  uint64_t next_expected_packet_number =
258  quicly_get_next_expected_packet_number (qctx->conn);
259  if (next_expected_packet_number == UINT64_MAX)
260  return;
261 
262  aead = qctx->ingress_keys.aead_ctx;
263  header_protection = qctx->ingress_keys.hp_ctx;
264 
265  if (!aead || !header_protection)
266  return;
267 
268  size_t encrypted_len = pctx->packet.octets.len - pctx->packet.encrypted_off;
269  uint8_t hpmask[5] = { 0 };
270  uint32_t pnbits = 0;
271  size_t pnlen, ptlen, i;
272 
273  /* decipher the header protection, as well as obtaining pnbits, pnlen */
274  if (encrypted_len < header_protection->algo->iv_size + QUICLY_MAX_PN_SIZE)
275  return;
276  ptls_cipher_init (header_protection,
277  pctx->packet.octets.base + pctx->packet.encrypted_off +
278  QUICLY_MAX_PN_SIZE);
279  ptls_cipher_encrypt (header_protection, hpmask, hpmask, sizeof (hpmask));
280  pctx->packet.octets.base[0] ^=
281  hpmask[0] & (QUICLY_PACKET_IS_LONG_HEADER (pctx->packet.octets.base[0]) ?
282  0xf : 0x1f);
283  pnlen = (pctx->packet.octets.base[0] & 0x3) + 1;
284  for (i = 0; i != pnlen; ++i)
285  {
286  pctx->packet.octets.base[pctx->packet.encrypted_off + i] ^=
287  hpmask[i + 1];
288  pnbits =
289  (pnbits << 8) | pctx->packet.octets.base[pctx->packet.encrypted_off +
290  i];
291  }
292 
293  size_t aead_off = pctx->packet.encrypted_off + pnlen;
294 
295  pn =
296  quicly_determine_packet_number (pnbits, pnlen * 8,
297  next_expected_packet_number);
298 
299  int key_phase_bit =
300  (pctx->packet.octets.base[0] & QUICLY_KEY_PHASE_BIT) != 0;
301 
302  if (key_phase_bit != (qctx->key_phase_ingress & 1))
303  {
304  pctx->packet.octets.base[0] ^=
305  hpmask[0] &
306  (QUICLY_PACKET_IS_LONG_HEADER (pctx->packet.octets.base[0]) ? 0xf :
307  0x1f);
308  for (i = 0; i != pnlen; ++i)
309  {
310  pctx->packet.octets.base[pctx->packet.encrypted_off + i] ^=
311  hpmask[i + 1];
312  }
313  return;
314  }
315 
316  if ((ptlen =
318  pctx->packet.octets.base + aead_off,
319  pctx->packet.octets.base + aead_off,
320  pctx->packet.octets.len - aead_off,
321  pn, pctx->packet.octets.base,
322  aead_off)) == SIZE_MAX)
323  {
324  fprintf (stderr,
325  "%s: aead decryption failure (pn: %d)\n", __FUNCTION__, pn);
326  return;
327  }
328 
329  pctx->packet.encrypted_off = aead_off;
330  pctx->packet.octets.len = ptlen + aead_off;
331 
332  pctx->packet.decrypted.pn = pn;
333  pctx->packet.decrypted.key_phase = qctx->key_phase_ingress;
334 }
335 
336 #ifdef QUIC_HP_CRYPTO
337 static void
338 quic_crypto_cipher_do_init (ptls_cipher_context_t * _ctx, const void *iv)
339 {
340  struct cipher_context_t *ctx = (struct cipher_context_t *) _ctx;
342  if (!strcmp (ctx->super.algo->name, "AES128-CTR"))
343  {
344  id = VNET_CRYPTO_OP_AES_128_CTR_ENC;
345  }
346  else if (!strcmp (ctx->super.algo->name, "AES256-CTR"))
347  {
348  id = VNET_CRYPTO_OP_AES_256_CTR_ENC;
349  }
350  else
351  {
352  QUIC_DBG (1, "%s, Invalid crypto cipher : ", __FUNCTION__,
353  _ctx->algo->name);
354  assert (0);
355  }
356  vnet_crypto_op_init (&ctx->op, id);
357  ctx->op.iv = (u8 *) iv;
358  ctx->op.key_index = ctx->key_index;
359 }
360 
/* Intentionally empty: the op is stored inline in the context and the
 * vnet crypto key needs no per-context teardown here. */
static void
quic_crypto_cipher_dispose (ptls_cipher_context_t * _ctx)
{
  /* Do nothing */
}
366 
367 static void
368 quic_crypto_cipher_encrypt (ptls_cipher_context_t * _ctx, void *output,
369  const void *input, size_t _len)
370 {
372  struct cipher_context_t *ctx = (struct cipher_context_t *) _ctx;
373 
374  ctx->op.src = (u8 *) input;
375  ctx->op.dst = output;
376  ctx->op.len = _len;
377 
378  vnet_crypto_process_ops (vm, &ctx->op, 1);
379 }
380 
381 static int
382 quic_crypto_cipher_setup_crypto (ptls_cipher_context_t * _ctx, int is_enc,
383  const void *key, const EVP_CIPHER * cipher,
384  quicly_do_transform_fn do_transform)
385 {
386  struct cipher_context_t *ctx = (struct cipher_context_t *) _ctx;
387 
388  ctx->super.do_dispose = quic_crypto_cipher_dispose;
389  ctx->super.do_init = quic_crypto_cipher_do_init;
390  ctx->super.do_transform = do_transform;
391 
393  vnet_crypto_alg_t algo;
394  if (!strcmp (ctx->super.algo->name, "AES128-CTR"))
395  {
396  algo = VNET_CRYPTO_ALG_AES_128_CTR;
397  }
398  else if (!strcmp (ctx->super.algo->name, "AES256-CTR"))
399  {
400  algo = VNET_CRYPTO_ALG_AES_256_CTR;
401  }
402  else
403  {
404  QUIC_DBG (1, "%s, Invalid crypto cipher : ", __FUNCTION__,
405  _ctx->algo->name);
406  assert (0);
407  }
408 
409  ctx->key_index = vnet_crypto_key_add (vm, algo,
410  (u8 *) key, _ctx->algo->key_size);
411 
412  return 0;
413 }
414 
/* AES128-CTR setup hook; is_enc is ignored and 1 is passed because CTR
 * uses the encrypt direction for both encryption and decryption. */
static int
quic_crypto_aes128ctr_setup_crypto (ptls_cipher_context_t * ctx, int is_enc,
				    const void *key)
{
  return quic_crypto_cipher_setup_crypto (ctx, 1, key, EVP_aes_128_ctr (),
					  quic_crypto_cipher_encrypt);
}
422 
/* AES256-CTR setup hook; is_enc is ignored and 1 is passed because CTR
 * uses the encrypt direction for both encryption and decryption. */
static int
quic_crypto_aes256ctr_setup_crypto (ptls_cipher_context_t * ctx, int is_enc,
				    const void *key)
{
  return quic_crypto_cipher_setup_crypto (ctx, 1, key, EVP_aes_256_ctr (),
					  quic_crypto_cipher_encrypt);
}
430 
431 #endif // QUIC_HP_CRYPTO
432 
433 void
434 quic_crypto_aead_encrypt_init (ptls_aead_context_t * _ctx, const void *iv,
435  const void *aad, size_t aadlen)
436 {
437  quic_main_t *qm = &quic_main;
438  u32 thread_index = vlib_get_thread_index ();
439 
440  struct aead_crypto_context_t *ctx = (struct aead_crypto_context_t *) _ctx;
441 
443  if (!strcmp (ctx->super.algo->name, "AES128-GCM"))
444  {
445  id = VNET_CRYPTO_OP_AES_128_GCM_ENC;
446  }
447  else if (!strcmp (ctx->super.algo->name, "AES256-GCM"))
448  {
449  id = VNET_CRYPTO_OP_AES_256_GCM_ENC;
450  }
451  else
452  {
453  assert (0);
454  }
455 
456  quic_crypto_batch_ctx_t *quic_crypto_batch_ctx =
457  &qm->wrk_ctx[thread_index].crypto_context_batch;
458 
459  vnet_crypto_op_t *vnet_op =
460  &quic_crypto_batch_ctx->aead_crypto_tx_packets_ops
461  [quic_crypto_batch_ctx->nb_tx_packets];
462  vnet_crypto_op_init (vnet_op, id);
463  vnet_op->aad = (u8 *) aad;
464  vnet_op->aad_len = aadlen;
465  vnet_op->iv = clib_mem_alloc (PTLS_MAX_IV_SIZE);
466  clib_memcpy (vnet_op->iv, iv, PTLS_MAX_IV_SIZE);
467  vnet_op->key_index = ctx->key_index;
468 }
469 
470 size_t
471 quic_crypto_aead_encrypt_update (ptls_aead_context_t * _ctx, void *output,
472  const void *input, size_t inlen)
473 {
474  struct aead_crypto_context_t *ctx = (struct aead_crypto_context_t *) _ctx;
475 
476  quic_main_t *qm = &quic_main;
477  u32 thread_index = vlib_get_thread_index ();
478  quic_crypto_batch_ctx_t *quic_crypto_batch_ctx =
479  &qm->wrk_ctx[thread_index].crypto_context_batch;
480 
481  vnet_crypto_op_t *vnet_op =
482  &quic_crypto_batch_ctx->aead_crypto_tx_packets_ops
483  [quic_crypto_batch_ctx->nb_tx_packets];
484  vnet_op->src = (u8 *) input;
485  vnet_op->dst = output;
486  vnet_op->len = inlen;
487  vnet_op->tag_len = ctx->super.algo->tag_size;
488 
489  vnet_op->tag = vnet_op->src + inlen;
490 
491  return 0;
492 }
493 
494 size_t
495 quic_crypto_aead_encrypt_final (ptls_aead_context_t * _ctx, void *output)
496 {
497  quic_main_t *qm = &quic_main;
498  u32 thread_index = vlib_get_thread_index ();
499  quic_crypto_batch_ctx_t *quic_crypto_batch_ctx =
500  &qm->wrk_ctx[thread_index].crypto_context_batch;
501 
502  vnet_crypto_op_t *vnet_op =
503  &quic_crypto_batch_ctx->
504  aead_crypto_tx_packets_ops[quic_crypto_batch_ctx->nb_tx_packets];
505  quic_crypto_batch_ctx->nb_tx_packets++;
506  return vnet_op->len + vnet_op->tag_len;
507 }
508 
509 size_t
510 quic_crypto_aead_decrypt (ptls_aead_context_t * _ctx, void *_output,
511  const void *input, size_t inlen, const void *iv,
512  const void *aad, size_t aadlen)
513 {
515  struct aead_crypto_context_t *ctx = (struct aead_crypto_context_t *) _ctx;
516 
518  if (!strcmp (ctx->super.algo->name, "AES128-GCM"))
519  {
520  id = VNET_CRYPTO_OP_AES_128_GCM_DEC;
521  }
522  else if (!strcmp (ctx->super.algo->name, "AES256-GCM"))
523  {
524  id = VNET_CRYPTO_OP_AES_256_GCM_DEC;
525  }
526  else
527  {
528  assert (0);
529  }
530 
531  vnet_crypto_op_init (&ctx->op, id);
532  ctx->op.aad = (u8 *) aad;
533  ctx->op.aad_len = aadlen;
534  ctx->op.iv = (u8 *) iv;
535 
536  ctx->op.src = (u8 *) input;
537  ctx->op.dst = _output;
538  ctx->op.key_index = ctx->key_index;
539  ctx->op.len = inlen - ctx->super.algo->tag_size;
540 
541  ctx->op.tag_len = ctx->super.algo->tag_size;
542  ctx->op.tag = ctx->op.src + ctx->op.len;
543 
544  vnet_crypto_process_ops (vm, &ctx->op, 1);
545 
546  if (ctx->op.status != VNET_CRYPTO_OP_STATUS_COMPLETED)
547  return SIZE_MAX;
548 
549  return ctx->op.len;
550 }
551 
552 static size_t
554  ptls_aead_context_t * _ctx, void *_output,
555  const void *input, size_t inlen,
556  uint64_t decrypted_pn, const void *aad,
557  size_t aadlen)
558 {
559  struct aead_crypto_context_t *ctx = (struct aead_crypto_context_t *) _ctx;
561  if (!strcmp (ctx->super.algo->name, "AES128-GCM"))
562  {
563  id = VNET_CRYPTO_OP_AES_128_GCM_DEC;
564  }
565  else if (!strcmp (ctx->super.algo->name, "AES256-GCM"))
566  {
567  id = VNET_CRYPTO_OP_AES_256_GCM_DEC;
568  }
569  else
570  {
571  return SIZE_MAX;
572  }
573 
574  quic_main_t *qm = &quic_main;
575  quic_crypto_batch_ctx_t *quic_crypto_batch_ctx =
576  &qm->wrk_ctx[qctx->c_thread_index].crypto_context_batch;
577 
578  vnet_crypto_op_t *vnet_op =
579  &quic_crypto_batch_ctx->aead_crypto_rx_packets_ops
580  [quic_crypto_batch_ctx->nb_rx_packets];
581 
582  vnet_crypto_op_init (vnet_op, id);
583  vnet_op->aad = (u8 *) aad;
584  vnet_op->aad_len = aadlen;
585  vnet_op->iv = clib_mem_alloc (PTLS_MAX_IV_SIZE);
586  build_iv (_ctx, vnet_op->iv, decrypted_pn);
587  vnet_op->src = (u8 *) input;
588  vnet_op->dst = _output;
589  vnet_op->key_index = ctx->key_index;
590  vnet_op->len = inlen - ctx->super.algo->tag_size;
591  vnet_op->tag_len = ctx->super.algo->tag_size;
592  vnet_op->tag = vnet_op->src + vnet_op->len;
593  quic_crypto_batch_ctx->nb_rx_packets++;
594  return vnet_op->len;
595 }
596 
/* Intentionally empty: the op lives inline in the context.
 * NOTE(review): the vnet crypto key registered in setup is not removed
 * here -- confirm keys are reclaimed elsewhere. */
static void
quic_crypto_aead_dispose_crypto (ptls_aead_context_t * _ctx)
{

}
602 
603 static int
604 quic_crypto_aead_setup_crypto (ptls_aead_context_t * _ctx, int is_enc,
605  const void *key, const EVP_CIPHER * cipher)
606 {
608  struct aead_crypto_context_t *ctx = (struct aead_crypto_context_t *) _ctx;
609 
610  vnet_crypto_alg_t algo;
611  if (!strcmp (ctx->super.algo->name, "AES128-GCM"))
612  {
613  algo = VNET_CRYPTO_ALG_AES_128_GCM;
614  }
615  else if (!strcmp (ctx->super.algo->name, "AES256-GCM"))
616  {
617  algo = VNET_CRYPTO_ALG_AES_256_GCM;
618  }
619  else
620  {
621  QUIC_DBG (1, "%s, invalied aead cipher %s", __FUNCTION__,
622  _ctx->algo->name);
623  assert (0);
624  }
625 
626  if (quic_main.vnet_crypto_enabled)
627  {
628  ctx->super.do_decrypt = quic_crypto_aead_decrypt;
629 
630  ctx->super.do_encrypt_init = quic_crypto_aead_encrypt_init;
631  ctx->super.do_encrypt_update = quic_crypto_aead_encrypt_update;
632  ctx->super.do_encrypt_final = quic_crypto_aead_encrypt_final;
633  ctx->super.dispose_crypto = quic_crypto_aead_dispose_crypto;
634 
636  ctx->key_index = vnet_crypto_key_add (vm, algo,
637  (u8 *) key, _ctx->algo->key_size);
639  }
640  else
641  {
642  if (!strcmp (ctx->super.algo->name, "AES128-GCM"))
643  ptls_openssl_aes128gcm.setup_crypto (_ctx, is_enc, key);
644  else if (!strcmp (ctx->super.algo->name, "AES256-GCM"))
645  ptls_openssl_aes256gcm.setup_crypto (_ctx, is_enc, key);
646  }
647 
648  return 0;
649 }
650 
/* picotls setup hook for AES128-GCM AEAD contexts. */
static int
quic_crypto_aead_aes128gcm_setup_crypto (ptls_aead_context_t * ctx,
					 int is_enc, const void *key)
{
  return quic_crypto_aead_setup_crypto (ctx, is_enc, key, EVP_aes_128_gcm ());
}
657 
/* picotls setup hook for AES256-GCM AEAD contexts. */
static int
quic_crypto_aead_aes256gcm_setup_crypto (ptls_aead_context_t * ctx,
					 int is_enc, const void *key)
{
  return quic_crypto_aead_setup_crypto (ctx, is_enc, key, EVP_aes_256_gcm ());
}
664 
#ifdef QUIC_HP_CRYPTO
/* vnet-crypto backed header-protection cipher descriptors.  The setup
 * callbacks must be the quic_crypto_*-prefixed functions defined above;
 * the bare names were undeclared in this translation unit. */
ptls_cipher_algorithm_t quic_crypto_aes128ctr = {
  "AES128-CTR",
  PTLS_AES128_KEY_SIZE,
  1, PTLS_AES_IV_SIZE,
  sizeof (struct cipher_context_t), quic_crypto_aes128ctr_setup_crypto
};

ptls_cipher_algorithm_t quic_crypto_aes256ctr = {
  "AES256-CTR", PTLS_AES256_KEY_SIZE, 1 /* block size */ ,
  PTLS_AES_IV_SIZE, sizeof (struct cipher_context_t),
  quic_crypto_aes256ctr_setup_crypto
};
#endif
678 
/* AES128-GCM AEAD descriptor: vnet-crypto backed setup; header
 * protection uses the openssl CTR cipher unless QUIC_HP_CRYPTO. */
ptls_aead_algorithm_t quic_crypto_aes128gcm = {
  "AES128-GCM",
#ifdef QUIC_HP_CRYPTO
  &quic_crypto_aes128ctr,
#else
  &ptls_openssl_aes128ctr,
#endif
  &ptls_openssl_aes128ecb,
  PTLS_AES128_KEY_SIZE,
  PTLS_AESGCM_IV_SIZE,
  PTLS_AESGCM_TAG_SIZE,
  sizeof (struct aead_crypto_context_t),
  quic_crypto_aead_aes128gcm_setup_crypto
};
693 
/* AES256-GCM AEAD descriptor: vnet-crypto backed setup; header
 * protection uses the openssl CTR cipher unless QUIC_HP_CRYPTO. */
ptls_aead_algorithm_t quic_crypto_aes256gcm = {
  "AES256-GCM",
#ifdef QUIC_HP_CRYPTO
  &quic_crypto_aes256ctr,
#else
  &ptls_openssl_aes256ctr,
#endif
  &ptls_openssl_aes256ecb,
  PTLS_AES256_KEY_SIZE,
  PTLS_AESGCM_IV_SIZE,
  PTLS_AESGCM_TAG_SIZE,
  sizeof (struct aead_crypto_context_t),
  quic_crypto_aead_aes256gcm_setup_crypto
};
708 
/* TLS 1.3 cipher suite binding AES128-GCM to SHA-256. */
ptls_cipher_suite_t quic_crypto_aes128gcmsha256 = {
  PTLS_CIPHER_SUITE_AES_128_GCM_SHA256,
  &quic_crypto_aes128gcm, &ptls_openssl_sha256
};
713 
/* TLS 1.3 cipher suite binding AES256-GCM to SHA-384. */
ptls_cipher_suite_t quic_crypto_aes256gcmsha384 = {
  PTLS_CIPHER_SUITE_AES_256_GCM_SHA384,
  &quic_crypto_aes256gcm, &ptls_openssl_sha384
};
718 
719 ptls_cipher_suite_t *quic_crypto_cipher_suites[] = {
721 };
722 
/* quicly crypto engine: vpp supplies key setup and deferred header
 * protection (applied in quic_crypto_finalize_send_packet). */
quicly_crypto_engine_t quic_crypto_engine = {
  quic_crypto_setup_cipher, quic_crypto_finalize_send_packet_cb
};
726 
727 int
728 quic_encrypt_ticket_cb (ptls_encrypt_ticket_t * _self, ptls_t * tls,
729  int is_encrypt, ptls_buffer_t * dst, ptls_iovec_t src)
730 {
731  quic_session_cache_t *self = (void *) _self;
732  int ret;
733 
734  if (is_encrypt)
735  {
736 
737  /* replace the cached entry along with a newly generated session id */
738  clib_mem_free (self->data.base);
739  if ((self->data.base = clib_mem_alloc (src.len)) == NULL)
740  return PTLS_ERROR_NO_MEMORY;
741 
742  ptls_get_context (tls)->random_bytes (self->id, sizeof (self->id));
743  clib_memcpy (self->data.base, src.base, src.len);
744  self->data.len = src.len;
745 
746  /* store the session id in buffer */
747  if ((ret = ptls_buffer_reserve (dst, sizeof (self->id))) != 0)
748  return ret;
749  clib_memcpy (dst->base + dst->off, self->id, sizeof (self->id));
750  dst->off += sizeof (self->id);
751 
752  }
753  else
754  {
755 
756  /* check if session id is the one stored in cache */
757  if (src.len != sizeof (self->id))
758  return PTLS_ERROR_SESSION_NOT_FOUND;
759  if (clib_memcmp (self->id, src.base, sizeof (self->id)) != 0)
760  return PTLS_ERROR_SESSION_NOT_FOUND;
761 
762  /* return the cached value */
763  if ((ret = ptls_buffer_reserve (dst, self->data.len)) != 0)
764  return ret;
765  clib_memcpy (dst->base + dst->off, self->data.base, self->data.len);
766  dst->off += self->data.len;
767  }
768 
769  return 0;
770 }
771 
772 /*
773  * fd.io coding-style-patch-verification: ON
774  *
775  * Local Variables:
776  * eval: (c-set-style "gnu")
777  * End:
778  */
u32 vnet_crypto_process_ops(vlib_main_t *vm, vnet_crypto_op_t ops[], u32 n_ops)
Definition: crypto.c:99
static void clib_rwlock_reader_lock(clib_rwlock_t *p)
Definition: lock.h:150
ptls_cipher_context_t super
Definition: quic_crypto.c:34
quic_worker_ctx_t * wrk_ctx
Definition: quic.h:263
static void clib_rwlock_writer_lock(clib_rwlock_t *p)
Definition: lock.h:173
int key_phase_ingress
Definition: quic.h:176
static void do_finalize_send_packet(ptls_cipher_context_t *hp, quicly_datagram_t *packet, size_t first_byte_at, size_t payload_from)
Definition: quic_crypto.c:112
ptls_cipher_context_t * hp_ctx
Definition: quic.h:173
size_t snd_ctx_count
Definition: quic.h:222
vl_api_address_t src
Definition: gre.api:54
struct quic_ctx_::@650 ingress_keys
unsigned char u8
Definition: types.h:56
size_t quic_crypto_aead_decrypt(ptls_aead_context_t *_ctx, void *_output, const void *input, size_t inlen, const void *iv, const void *aad, size_t aadlen)
Definition: quic_crypto.c:510
#define QUIC_DBG(_lvl, _fmt, _args...)
Definition: quic.h:80
u8 id[64]
Definition: dhcp.api:160
size_t quic_crypto_aead_encrypt_update(ptls_aead_context_t *_ctx, void *output, const void *input, size_t inlen)
Definition: quic_crypto.c:471
#define clib_memcpy(d, s, n)
Definition: string.h:180
#define assert(x)
Definition: dlmalloc.c:31
clib_rwlock_t crypto_keys_quic_rw_lock
Definition: quic.h:281
static_always_inline void vnet_crypto_op_init(vnet_crypto_op_t *op, vnet_crypto_op_id_t type)
Definition: crypto.h:483
size_t quic_crypto_aead_encrypt_final(ptls_aead_context_t *_ctx, void *output)
Definition: quic_crypto.c:495
static int quic_crypto_aead_setup_crypto(ptls_aead_context_t *_ctx, int is_enc, const void *key, const EVP_CIPHER *cipher)
Definition: quic_crypto.c:604
static size_t quic_crypto_offload_aead_decrypt(quic_ctx_t *qctx, ptls_aead_context_t *_ctx, void *_output, const void *input, size_t inlen, uint64_t decrypted_pn, const void *aad, size_t aadlen)
Definition: quic_crypto.c:553
struct quic_encrypt_cb_ctx_::quic_finalize_send_packet_cb_ctx_ snd_ctx[QUIC_MAX_COALESCED_PACKET]
unsigned int u32
Definition: types.h:88
vnet_crypto_op_t op
Definition: quic_crypto.c:35
static int quic_crypto_aead_aes256gcm_setup_crypto(ptls_aead_context_t *ctx, int is_enc, const void *key)
Definition: quic_crypto.c:659
vnet_crypto_op_t aead_crypto_rx_packets_ops[QUIC_RCV_MAX_BATCH_PACKETS]
Definition: quic.h:227
u32 vnet_crypto_key_add(vlib_main_t *vm, vnet_crypto_alg_t alg, u8 *data, u16 length)
Definition: crypto.c:345
vnet_crypto_alg_t
Definition: crypto.h:121
quic_main_t quic_main
Definition: quic.c:46
static u8 iv[]
Definition: aes_cbc.c:24
ptls_cipher_suite_t quic_crypto_aes256gcmsha384
Definition: quic_crypto.c:714
static void clib_rwlock_reader_unlock(clib_rwlock_t *p)
Definition: lock.h:165
long ctx[MAX_CONNS]
Definition: main.c:144
void quic_crypto_batch_rx_packets(quic_crypto_batch_ctx_t *batch_ctx)
Definition: quic_crypto.c:75
void quic_crypto_aead_encrypt_init(ptls_aead_context_t *_ctx, const void *iv, const void *aad, size_t aadlen)
Definition: quic_crypto.c:434
static int quic_crypto_aead_aes128gcm_setup_crypto(ptls_aead_context_t *ctx, int is_enc, const void *key)
Definition: quic_crypto.c:652
vl_api_address_t dst
Definition: gre.api:55
vlib_main_t * vm
Definition: in2out_ed.c:1599
static void clib_rwlock_writer_unlock(clib_rwlock_t *p)
Definition: lock.h:187
quicly_conn_t * conn
QUIC ctx case.
Definition: quic.h:146
static_always_inline uword vlib_get_thread_index(void)
Definition: threads.h:218
#define clib_memcmp(s1, s2, m1)
Definition: string.h:737
sll srl srl sll sra u16x4 i
Definition: vector_sse42.h:317
void(* quicly_do_transform_fn)(ptls_cipher_context_t *, void *, const void *, size_t)
Definition: quic_crypto.c:29
void quic_crypto_decrypt_packet(quic_ctx_t *qctx, quic_rx_packet_ctx_t *pctx)
Definition: quic_crypto.c:247
size_t nb_rx_packets
Definition: quic.h:229
ptls_cipher_suite_t quic_crypto_aes128gcmsha256
Definition: quic_crypto.c:709
u8 vnet_crypto_enabled
Definition: quic.h:279
static void clib_mem_free(void *p)
Definition: mem.h:215
#define QUICLY_EPOCH_1RTT
Definition: quic_crypto.c:24
static void * clib_mem_alloc(uword size)
Definition: mem.h:157
static vlib_main_t * vlib_get_main(void)
Definition: global_funcs.h:23
vnet_crypto_op_t aead_crypto_tx_packets_ops[QUIC_SEND_MAX_BATCH_PACKETS]
Definition: quic.h:227
typedef key
Definition: ipsec_types.api:85
static void quic_crypto_aead_dispose_crypto(ptls_aead_context_t *_ctx)
Definition: quic_crypto.c:598
void quic_crypto_batch_tx_packets(quic_crypto_batch_ctx_t *batch_ctx)
Definition: quic_crypto.c:56
ptls_aead_context_t * aead_ctx
Definition: quic.h:174
vnet_crypto_op_t op
Definition: quic_crypto.c:42
quicly_decoded_packet_t packet
Definition: quic.h:245
ptls_aead_algorithm_t quic_crypto_aes256gcm
Definition: quic_crypto.c:694
ptls_aead_context_t super
Definition: quic_crypto.c:41
void quic_crypto_finalize_send_packet_cb(struct st_quicly_crypto_engine_t *engine, quicly_conn_t *conn, ptls_cipher_context_t *hp, ptls_aead_context_t *aead, quicly_datagram_t *packet, size_t first_byte_at, size_t payload_from, int coalesced)
Definition: quic_crypto.c:227
vnet_crypto_op_status_t status
Definition: crypto.h:233
vnet_crypto_op_id_t
Definition: crypto.h:193
static int quic_crypto_setup_cipher(quicly_crypto_engine_t *engine, quicly_conn_t *conn, size_t epoch, int is_enc, ptls_cipher_context_t **hp_ctx, ptls_aead_context_t **aead_ctx, ptls_aead_algorithm_t *aead, ptls_hash_algorithm_t *hash, const void *secret)
Definition: quic_crypto.c:153
void build_iv(ptls_aead_context_t *ctx, uint8_t *iv, uint64_t seq)
Definition: quic_crypto.c:94
quic_crypto_batch_ctx_t crypto_context_batch
Definition: quic.h:240
void quic_crypto_finalize_send_packet(quicly_datagram_t *packet)
Definition: quic_crypto.c:137
int quic_encrypt_ticket_cb(ptls_encrypt_ticket_t *_self, ptls_t *tls, int is_encrypt, ptls_buffer_t *dst, ptls_iovec_t src)
Definition: quic_crypto.c:728
vnet_crypto_main_t crypto_main
Definition: crypto.c:20
ptls_aead_algorithm_t quic_crypto_aes128gcm
Definition: quic_crypto.c:679
quic_ctx_t * quic_get_conn_ctx(quicly_conn_t *conn)
Definition: quic.c:416
size_t nb_tx_packets
Definition: quic.h:229