FD.io VPP  v20.05-21-gb1500e9ff
Vector Packet Processing
main.c
Go to the documentation of this file.
1 /*
2  *------------------------------------------------------------------
3  * Copyright (c) 2019 Cisco and/or its affiliates.
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at:
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  *------------------------------------------------------------------
16  */
17 
18 #include <openssl/evp.h>
19 #include <openssl/hmac.h>
20 #include <openssl/rand.h>
21 
22 #include <vlib/vlib.h>
23 #include <vnet/plugin/plugin.h>
24 #include <vnet/crypto/crypto.h>
25 #include <vpp/app/version.h>
26 
27 typedef struct
28 {
29  CLIB_CACHE_LINE_ALIGN_MARK (cacheline0);
30  EVP_CIPHER_CTX *evp_cipher_ctx;
31  HMAC_CTX *hmac_ctx;
32 #if OPENSSL_VERSION_NUMBER < 0x10100000L
33  HMAC_CTX _hmac_ctx;
34 #endif
36 
37 static openssl_per_thread_data_t *per_thread_data = 0;
38 
/* EVP cipher ops supported by this engine: _(handler-mode, VNET id, EVP
 * cipher constructor).  The CTR variants are routed through the generic
 * "cbc" handlers — CTR is a stream mode, so the cbc path's padding and
 * finalization handling is harmless for it. */
#define foreach_openssl_evp_op \
  _(cbc, DES_CBC, EVP_des_cbc) \
  _(cbc, 3DES_CBC, EVP_des_ede3_cbc) \
  _(cbc, AES_128_CBC, EVP_aes_128_cbc) \
  _(cbc, AES_192_CBC, EVP_aes_192_cbc) \
  _(cbc, AES_256_CBC, EVP_aes_256_cbc) \
  _(gcm, AES_128_GCM, EVP_aes_128_gcm) \
  _(gcm, AES_192_GCM, EVP_aes_192_gcm) \
  _(gcm, AES_256_GCM, EVP_aes_256_gcm) \
  _(cbc, AES_128_CTR, EVP_aes_128_ctr) \
  _(cbc, AES_192_CTR, EVP_aes_192_ctr) \
  _(cbc, AES_256_CTR, EVP_aes_256_ctr)
/* HMAC digests supported by this engine: _(VNET id, EVP md constructor). */
#define foreach_openssl_hmac_op \
  _(MD5, EVP_md5) \
  _(SHA1, EVP_sha1) \
  _(SHA224, EVP_sha224) \
  _(SHA256, EVP_sha256) \
  _(SHA384, EVP_sha384) \
  _(SHA512, EVP_sha512)
59 
62  vnet_crypto_op_chunk_t * chunks, u32 n_ops,
63  const EVP_CIPHER * cipher)
64 {
65  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
66  vm->thread_index);
67  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
69  u32 i, j, curr_len = 0;
70  u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];
71 
72  for (i = 0; i < n_ops; i++)
73  {
74  vnet_crypto_op_t *op = ops[i];
76  int out_len = 0;
77  int iv_len;
78 
79  if (op->op == VNET_CRYPTO_OP_3DES_CBC_ENC
80  || op->op == VNET_CRYPTO_OP_DES_CBC_ENC)
81  iv_len = 8;
82  else
83  iv_len = 16;
84 
86  RAND_bytes (op->iv, iv_len);
87 
88  EVP_EncryptInit_ex (ctx, cipher, NULL, key->data, op->iv);
89 
91  EVP_CIPHER_CTX_set_padding (ctx, 0);
92 
94  {
95  chp = chunks + op->chunk_index;
96  u32 offset = 0;
97  for (j = 0; j < op->n_chunks; j++)
98  {
99  EVP_EncryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
100  chp->len);
101  curr_len = chp->len;
102  offset += out_len;
103  chp += 1;
104  }
105  if (out_len < curr_len)
106  EVP_EncryptFinal_ex (ctx, out_buf + offset, &out_len);
107 
108  offset = 0;
109  chp = chunks + op->chunk_index;
110  for (j = 0; j < op->n_chunks; j++)
111  {
112  clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
113  offset += chp->len;
114  chp += 1;
115  }
116  }
117  else
118  {
119  EVP_EncryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
120  if (out_len < op->len)
121  EVP_EncryptFinal_ex (ctx, op->dst + out_len, &out_len);
122  }
123  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
124  }
125  return n_ops;
126 }
127 
130  vnet_crypto_op_chunk_t * chunks, u32 n_ops,
131  const EVP_CIPHER * cipher)
132 {
133  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
134  vm->thread_index);
135  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
137  u32 i, j, curr_len = 0;
138  u8 out_buf[VLIB_BUFFER_DEFAULT_DATA_SIZE * 5];
139 
140  for (i = 0; i < n_ops; i++)
141  {
142  vnet_crypto_op_t *op = ops[i];
144  int out_len = 0;
145 
146  EVP_DecryptInit_ex (ctx, cipher, NULL, key->data, op->iv);
147 
149  EVP_CIPHER_CTX_set_padding (ctx, 0);
150 
152  {
153  chp = chunks + op->chunk_index;
154  u32 offset = 0;
155  for (j = 0; j < op->n_chunks; j++)
156  {
157  EVP_DecryptUpdate (ctx, out_buf + offset, &out_len, chp->src,
158  chp->len);
159  curr_len = chp->len;
160  offset += out_len;
161  chp += 1;
162  }
163  if (out_len < curr_len)
164  EVP_DecryptFinal_ex (ctx, out_buf + offset, &out_len);
165 
166  offset = 0;
167  chp = chunks + op->chunk_index;
168  for (j = 0; j < op->n_chunks; j++)
169  {
170  clib_memcpy_fast (chp->dst, out_buf + offset, chp->len);
171  offset += chp->len;
172  chp += 1;
173  }
174  }
175  else
176  {
177  EVP_DecryptUpdate (ctx, op->dst, &out_len, op->src, op->len);
178  if (out_len < op->len)
179  EVP_DecryptFinal_ex (ctx, op->dst + out_len, &out_len);
180  }
181  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
182  }
183  return n_ops;
184 }
185 
188  vnet_crypto_op_chunk_t * chunks, u32 n_ops,
189  const EVP_CIPHER * cipher)
190 {
191  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
192  vm->thread_index);
193  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
195  u32 i, j;
196  for (i = 0; i < n_ops; i++)
197  {
198  vnet_crypto_op_t *op = ops[i];
200  int len = 0;
201 
203  RAND_bytes (op->iv, 8);
204 
205  EVP_EncryptInit_ex (ctx, cipher, 0, 0, 0);
206  EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
207  EVP_EncryptInit_ex (ctx, 0, 0, key->data, op->iv);
208  if (op->aad_len)
209  EVP_EncryptUpdate (ctx, NULL, &len, op->aad, op->aad_len);
211  {
212  chp = chunks + op->chunk_index;
213  for (j = 0; j < op->n_chunks; j++)
214  {
215  EVP_EncryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
216  chp += 1;
217  }
218  }
219  else
220  EVP_EncryptUpdate (ctx, op->dst, &len, op->src, op->len);
221  EVP_EncryptFinal_ex (ctx, op->dst + len, &len);
222  EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_GET_TAG, op->tag_len, op->tag);
223  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
224  }
225  return n_ops;
226 }
227 
230  vnet_crypto_op_chunk_t * chunks, u32 n_ops,
231  const EVP_CIPHER * cipher)
232 {
233  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
234  vm->thread_index);
235  EVP_CIPHER_CTX *ctx = ptd->evp_cipher_ctx;
237  u32 i, j, n_fail = 0;
238  for (i = 0; i < n_ops; i++)
239  {
240  vnet_crypto_op_t *op = ops[i];
242  int len = 0;
243 
244  EVP_DecryptInit_ex (ctx, cipher, 0, 0, 0);
245  EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_IVLEN, 12, 0);
246  EVP_DecryptInit_ex (ctx, 0, 0, key->data, op->iv);
247  if (op->aad_len)
248  EVP_DecryptUpdate (ctx, 0, &len, op->aad, op->aad_len);
250  {
251  chp = chunks + op->chunk_index;
252  for (j = 0; j < op->n_chunks; j++)
253  {
254  EVP_DecryptUpdate (ctx, chp->dst, &len, chp->src, chp->len);
255  chp += 1;
256  }
257  }
258  else
259  EVP_DecryptUpdate (ctx, op->dst, &len, op->src, op->len);
260  EVP_CIPHER_CTX_ctrl (ctx, EVP_CTRL_GCM_SET_TAG, op->tag_len, op->tag);
261 
262  if (EVP_DecryptFinal_ex (ctx, op->dst + len, &len) > 0)
263  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
264  else
265  {
266  n_fail++;
267  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
268  }
269  }
270  return n_ops - n_fail;
271 }
272 
275  vnet_crypto_op_chunk_t * chunks, u32 n_ops,
276  const EVP_MD * md)
277 {
278  u8 buffer[64];
279  openssl_per_thread_data_t *ptd = vec_elt_at_index (per_thread_data,
280  vm->thread_index);
281  HMAC_CTX *ctx = ptd->hmac_ctx;
283  u32 i, j, n_fail = 0;
284  for (i = 0; i < n_ops; i++)
285  {
286  vnet_crypto_op_t *op = ops[i];
288  unsigned int out_len = 0;
289  size_t sz = op->digest_len ? op->digest_len : EVP_MD_size (md);
290 
291  HMAC_Init_ex (ctx, key->data, vec_len (key->data), md, NULL);
293  {
294  chp = chunks + op->chunk_index;
295  for (j = 0; j < op->n_chunks; j++)
296  {
297  HMAC_Update (ctx, chp->src, chp->len);
298  chp += 1;
299  }
300  }
301  else
302  HMAC_Update (ctx, op->src, op->len);
303  HMAC_Final (ctx, buffer, &out_len);
304 
306  {
307  if ((memcmp (op->digest, buffer, sz)))
308  {
309  n_fail++;
310  op->status = VNET_CRYPTO_OP_STATUS_FAIL_BAD_HMAC;
311  continue;
312  }
313  }
314  else
315  clib_memcpy_fast (op->digest, buffer, sz);
316  op->status = VNET_CRYPTO_OP_STATUS_COMPLETED;
317  }
318  return n_ops - n_fail;
319 }
320 
/* For each EVP cipher op, instantiate four thin wrappers around the
 * generic cbc/gcm handlers: simple and chained variants for ENC/DEC. */
#define _(m, a, b) \
static u32 \
openssl_ops_enc_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return openssl_ops_enc_##m (vm, ops, 0, n_ops, b ()); } \
 \
u32 \
openssl_ops_dec_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return openssl_ops_dec_##m (vm, ops, 0, n_ops, b ()); } \
 \
static u32 \
openssl_ops_enc_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
			     vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
{ return openssl_ops_enc_##m (vm, ops, chunks, n_ops, b ()); } \
 \
static u32 \
openssl_ops_dec_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
			     vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
{ return openssl_ops_dec_##m (vm, ops, chunks, n_ops, b ()); }

foreach_openssl_evp_op;
#undef _
342 
/* For each HMAC op, instantiate simple and chained wrappers around the
 * generic openssl_ops_hmac handler. */
#define _(a, b) \
static u32 \
openssl_ops_hmac_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], u32 n_ops) \
{ return openssl_ops_hmac (vm, ops, 0, n_ops, b ()); } \
static u32 \
openssl_ops_hmac_chained_##a (vlib_main_t * vm, vnet_crypto_op_t * ops[], \
			      vnet_crypto_op_chunk_t *chunks, u32 n_ops) \
{ return openssl_ops_hmac (vm, ops, chunks, n_ops, b ()); }

foreach_openssl_hmac_op;
#undef _
354 
355 
356 clib_error_t *
358 {
361  u8 *seed_data = 0;
362  time_t t;
363  pid_t pid;
364 
365  u32 eidx = vnet_crypto_register_engine (vm, "openssl", 50, "OpenSSL");
366 
367 #define _(m, a, b) \
368  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_ENC, \
369  openssl_ops_enc_##a, \
370  openssl_ops_enc_chained_##a); \
371  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_DEC, \
372  openssl_ops_dec_##a, \
373  openssl_ops_dec_chained_##a); \
374 
376 #undef _
377 
378 #define _(a, b) \
379  vnet_crypto_register_ops_handlers (vm, eidx, VNET_CRYPTO_OP_##a##_HMAC, \
380  openssl_ops_hmac_##a, \
381  openssl_ops_hmac_chained_##a); \
382 
384 #undef _
385 
386  vec_validate_aligned (per_thread_data, tm->n_vlib_mains - 1,
388 
389  vec_foreach (ptd, per_thread_data)
390  {
391  ptd->evp_cipher_ctx = EVP_CIPHER_CTX_new ();
392 #if OPENSSL_VERSION_NUMBER >= 0x10100000L
393  ptd->hmac_ctx = HMAC_CTX_new ();
394 #else
395  HMAC_CTX_init (&(ptd->_hmac_ctx));
396  ptd->hmac_ctx = &ptd->_hmac_ctx;
397 #endif
398  }
399 
400  t = time (NULL);
401  pid = getpid ();
402  vec_add (seed_data, &t, sizeof (t));
403  vec_add (seed_data, &pid, sizeof (pid));
404  vec_add (seed_data, seed_data, sizeof (seed_data));
405 
406  RAND_seed ((const void *) seed_data, vec_len (seed_data));
407 
408  vec_free (seed_data);
409 
410  return 0;
411 }
412 
413 /* *INDENT-OFF* */
415 {
416  .runs_after = VLIB_INITS ("vnet_crypto_init"),
417 };
418 /* *INDENT-ON* */
419 
420 
421 /* *INDENT-OFF* */
423  .version = VPP_BUILD_VER,
424  .description = "OpenSSL Crypto Engine",
425 };
426 /* *INDENT-ON* */
427 
428 /*
429  * fd.io coding-style-patch-verification: ON
430  *
431  * Local Variables:
432  * eval: (c-set-style "gnu")
433  * End:
434  */
#define CLIB_CACHE_LINE_ALIGN_MARK(mark)
Definition: cache.h:60
static_always_inline u32 openssl_ops_hmac(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_MD *md)
Definition: main.c:274
VLIB_PLUGIN_REGISTER()
#define clib_memcpy_fast(a, b, c)
Definition: string.h:81
static_always_inline u32 openssl_ops_enc_cbc(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_CIPHER *cipher)
Definition: main.c:61
#define VNET_CRYPTO_OP_FLAG_CHAINED_BUFFERS
Definition: crypto.h:237
u32 thread_index
Definition: main.h:218
#define vec_validate_aligned(V, I, A)
Make sure vector is long enough for given index (no header, specified alignment)
Definition: vec.h:518
static_always_inline u32 openssl_ops_dec_cbc(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_CIPHER *cipher)
Definition: main.c:129
HMAC_CTX * hmac_ctx
Definition: main.c:31
unsigned char u8
Definition: types.h:56
#define vec_add(V, E, N)
Add N elements to end of vector V (no header, unspecified alignment)
Definition: vec.h:666
#define static_always_inline
Definition: clib.h:106
#define VLIB_INIT_FUNCTION(x)
Definition: init.h:173
static_always_inline u32 openssl_ops_dec_gcm(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_CIPHER *cipher)
Definition: main.c:229
#define vec_elt_at_index(v, i)
Get vector value at index i checking that i is in bounds.
unsigned int u32
Definition: types.h:88
vnet_crypto_op_id_t op
Definition: crypto.h:232
long ctx[MAX_CONNS]
Definition: main.c:144
clib_error_t * crypto_openssl_init(vlib_main_t *vm)
Definition: main.c:357
vlib_main_t * vm
Definition: in2out_ed.c:1599
#define VNET_CRYPTO_OP_FLAG_HMAC_CHECK
Definition: crypto.h:236
EVP_CIPHER_CTX * evp_cipher_ctx
Definition: main.c:30
u8 len
Definition: ip_types.api:92
#define foreach_openssl_hmac_op
Definition: main.c:52
#define VNET_CRYPTO_OP_FLAG_INIT_IV
Definition: crypto.h:235
sll srl srl sll sra u16x4 i
Definition: vector_sse42.h:317
#define vec_free(V)
Free vector&#39;s memory (no header).
Definition: vec.h:380
typedef key
Definition: ipsec_types.api:85
template key/value backing page structure
Definition: bihash_doc.h:44
static_always_inline vnet_crypto_key_t * vnet_crypto_get_key(vnet_crypto_key_index_t index)
Definition: crypto.h:503
#define foreach_openssl_evp_op
Definition: main.c:39
#define VLIB_BUFFER_DEFAULT_DATA_SIZE
Definition: buffer.h:53
#define vec_len(v)
Number of elements in vector (rvalue-only, NULL tolerant)
u32 pid
Definition: dhcp.api:164
vnet_crypto_op_status_t status
Definition: crypto.h:233
static vlib_thread_main_t * vlib_get_thread_main()
Definition: global_funcs.h:32
#define vec_foreach(var, vec)
Vector iterator.
#define CLIB_CACHE_LINE_BYTES
Definition: cache.h:59
u32 vnet_crypto_register_engine(vlib_main_t *vm, char *name, int prio, char *desc)
Definition: crypto.c:112
#define VLIB_INITS(...)
Definition: init.h:344
static_always_inline u32 openssl_ops_enc_gcm(vlib_main_t *vm, vnet_crypto_op_t *ops[], vnet_crypto_op_chunk_t *chunks, u32 n_ops, const EVP_CIPHER *cipher)
Definition: main.c:187