/*
 * target/linux/generic/files/crypto/ocf/cryptocteon/cavium_crypto.c
 *
 * Copyright (c) 2009 David McCullough <david.mccullough@securecomputing.com>
 *
 * Copyright (c) 2003-2007 Cavium Networks (support@cavium.com). All rights
 * reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    This product includes software developed by Cavium Networks
 * 4. Cavium Networks' name may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * This Software, including technical data, may be subject to U.S. export
 * control laws, including the U.S. Export Administration Act and its
 * associated regulations, and may be subject to export or import regulations
 * in other countries. You warrant that You will comply strictly in all
 * respects with all such regulations and acknowledge that you have the
 * responsibility to obtain licenses to export, re-export or import the
 * Software.
 *
 * TO THE MAXIMUM EXTENT PERMITTED BY LAW, THE SOFTWARE IS PROVIDED "AS IS" AND
 * WITH ALL FAULTS AND CAVIUM MAKES NO PROMISES, REPRESENTATIONS OR WARRANTIES,
 * EITHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, WITH RESPECT TO THE
 * SOFTWARE, INCLUDING ITS CONDITION, ITS CONFORMITY TO ANY REPRESENTATION OR
 * DESCRIPTION, OR THE EXISTENCE OF ANY LATENT OR PATENT DEFECTS, AND CAVIUM
 * SPECIFICALLY DISCLAIMS ALL IMPLIED (IF ANY) WARRANTIES OF TITLE,
 * MERCHANTABILITY, NONINFRINGEMENT, FITNESS FOR A PARTICULAR PURPOSE, LACK OF
 * VIRUSES, ACCURACY OR COMPLETENESS, QUIET ENJOYMENT, QUIET POSSESSION OR
 * CORRESPONDENCE TO DESCRIPTION. THE ENTIRE RISK ARISING OUT OF USE OR
 * PERFORMANCE OF THE SOFTWARE LIES WITH YOU.
 */
/****************************************************************************/

#include <linux/scatterlist.h>
#include <asm/octeon/octeon.h>
#include "octeon-asm.h"

/****************************************************************************/

/* COP2 (crypto coprocessor) context save/restore, provided by platform
 * support code; every use of the CVMX crypto units below must be
 * bracketed by an enable/disable pair. */
extern unsigned long octeon_crypto_enable(struct octeon_cop2_state *);
extern void octeon_crypto_disable(struct octeon_cop2_state *, unsigned long);
49
/* Point (p) at the first element of scatterlist (s): segment index (i)
 * starts at 0 and (l) holds the bytes remaining in the current segment. */
#define SG_INIT(s, p, i, l) \
    { \
        (i) = 0; \
        (l) = (s)[0].length; \
        (p) = (typeof(p)) sg_virt((s)); \
        CVMX_PREFETCH0((p)); \
    }
57
/* Advance (p) by one element within the current scatterlist segment;
 * when the segment is exhausted, move to the next entry and restart
 * there.
 *
 * Fix: the original incremented the segment index (i) but then kept
 * reading entry 0 ((s)[0].length / sg_virt(s)), so any request spanning
 * more than the first scatterlist segment re-processed the first
 * segment's data instead of advancing.  Index by (i) after the
 * increment, matching SG_INIT's array-style access. */
#define SG_CONSUME(s, p, i, l) \
    { \
        (p)++; \
        (l) -= sizeof(*(p)); \
        if ((l) < 0) { \
            dprintk("%s, %d: l = %d\n", __FILE__, __LINE__, l); \
        } else if ((l) == 0) { \
            (i)++; \
            (l) = (s)[(i)].length; \
            (p) = (typeof(p)) sg_virt(&(s)[(i)]); \
            CVMX_PREFETCH0((p)); \
        } \
    }
71
/* IPsec/ESP framing constants (bytes).  The original block defined
 * ESP_HEADER_LENGTH and DES_CBC_IV_LENGTH twice; the duplicates are
 * removed here. */
#define ESP_HEADER_LENGTH 8
#define DES_CBC_IV_LENGTH 8
#define AES_CBC_IV_LENGTH 16
#define ESP_HMAC_LEN 12
79
80/****************************************************************************/
81
/* Feed one 64-bit word into the hash unit's 8-word input block for
 * SHA-1.  "next" tracks the fill position 0..7; writing the eighth
 * word triggers a SHA-1 round via STARTSHA and resets the position. */
#define CVM_LOAD_SHA_UNIT(dat, next) { \
    switch (next) { \
    case 0: CVMX_MT_HSH_DAT (dat, 0); next = 1; break; \
    case 1: CVMX_MT_HSH_DAT (dat, 1); next = 2; break; \
    case 2: CVMX_MT_HSH_DAT (dat, 2); next = 3; break; \
    case 3: CVMX_MT_HSH_DAT (dat, 3); next = 4; break; \
    case 4: CVMX_MT_HSH_DAT (dat, 4); next = 5; break; \
    case 5: CVMX_MT_HSH_DAT (dat, 5); next = 6; break; \
    case 6: CVMX_MT_HSH_DAT (dat, 6); next = 7; break; \
    default: CVMX_MT_HSH_STARTSHA (dat); next = 0; break; \
    } \
}
109
/* Feed two 64-bit words into the SHA-1 unit per invocation, keeping
 * the same 0..7 fill-position bookkeeping as CVM_LOAD_SHA_UNIT.  The
 * word that lands in slot 7 is delivered via STARTSHA to kick off the
 * round. */
#define CVM_LOAD2_SHA_UNIT(dat1, dat2, next) { \
    switch (next) { \
    case 0: CVMX_MT_HSH_DAT (dat1, 0); CVMX_MT_HSH_DAT (dat2, 1); next = 2; break; \
    case 1: CVMX_MT_HSH_DAT (dat1, 1); CVMX_MT_HSH_DAT (dat2, 2); next = 3; break; \
    case 2: CVMX_MT_HSH_DAT (dat1, 2); CVMX_MT_HSH_DAT (dat2, 3); next = 4; break; \
    case 3: CVMX_MT_HSH_DAT (dat1, 3); CVMX_MT_HSH_DAT (dat2, 4); next = 5; break; \
    case 4: CVMX_MT_HSH_DAT (dat1, 4); CVMX_MT_HSH_DAT (dat2, 5); next = 6; break; \
    case 5: CVMX_MT_HSH_DAT (dat1, 5); CVMX_MT_HSH_DAT (dat2, 6); next = 7; break; \
    case 6: CVMX_MT_HSH_DAT (dat1, 6); CVMX_MT_HSH_STARTSHA (dat2); next = 0; break; \
    default: CVMX_MT_HSH_STARTSHA (dat1); CVMX_MT_HSH_DAT (dat2, 0); next = 1; break; \
    } \
}
145
146/****************************************************************************/
147
/* Feed one 64-bit word into the hash unit's 8-word input block for
 * MD5.  "next" tracks the fill position 0..7; writing the eighth word
 * triggers an MD5 round via STARTMD5 and resets the position. */
#define CVM_LOAD_MD5_UNIT(dat, next) { \
    switch (next) { \
    case 0: CVMX_MT_HSH_DAT (dat, 0); next = 1; break; \
    case 1: CVMX_MT_HSH_DAT (dat, 1); next = 2; break; \
    case 2: CVMX_MT_HSH_DAT (dat, 2); next = 3; break; \
    case 3: CVMX_MT_HSH_DAT (dat, 3); next = 4; break; \
    case 4: CVMX_MT_HSH_DAT (dat, 4); next = 5; break; \
    case 5: CVMX_MT_HSH_DAT (dat, 5); next = 6; break; \
    case 6: CVMX_MT_HSH_DAT (dat, 6); next = 7; break; \
    default: CVMX_MT_HSH_STARTMD5 (dat); next = 0; break; \
    } \
}
175
/* Feed two 64-bit words into the MD5 unit per invocation, keeping the
 * same 0..7 fill-position bookkeeping as CVM_LOAD_MD5_UNIT.  The word
 * that lands in slot 7 is delivered via STARTMD5 to kick off the
 * round. */
#define CVM_LOAD2_MD5_UNIT(dat1, dat2, next) { \
    switch (next) { \
    case 0: CVMX_MT_HSH_DAT (dat1, 0); CVMX_MT_HSH_DAT (dat2, 1); next = 2; break; \
    case 1: CVMX_MT_HSH_DAT (dat1, 1); CVMX_MT_HSH_DAT (dat2, 2); next = 3; break; \
    case 2: CVMX_MT_HSH_DAT (dat1, 2); CVMX_MT_HSH_DAT (dat2, 3); next = 4; break; \
    case 3: CVMX_MT_HSH_DAT (dat1, 3); CVMX_MT_HSH_DAT (dat2, 4); next = 5; break; \
    case 4: CVMX_MT_HSH_DAT (dat1, 4); CVMX_MT_HSH_DAT (dat2, 5); next = 6; break; \
    case 5: CVMX_MT_HSH_DAT (dat1, 5); CVMX_MT_HSH_DAT (dat2, 6); next = 7; break; \
    case 6: CVMX_MT_HSH_DAT (dat1, 6); CVMX_MT_HSH_STARTMD5 (dat2); next = 0; break; \
    default: CVMX_MT_HSH_STARTMD5 (dat1); CVMX_MT_HSH_DAT (dat2, 0); next = 1; break; \
    } \
}
211
212/****************************************************************************/
213
/*
 * Byte-swap a 64-bit value (reverse its endianness) using the classic
 * pairwise mask-and-shift reduction: swap halves, then 16-bit groups,
 * then individual bytes.
 */
static inline uint64_t
swap64(uint64_t v)
{
    v = (v << 32) | (v >> 32);
    v = ((v & 0x0000ffff0000ffffULL) << 16) | ((v >> 16) & 0x0000ffff0000ffffULL);
    v = ((v & 0x00ff00ff00ff00ffULL) << 8) | ((v >> 8) & 0x00ff00ff00ff00ffULL);
    return v;
}
225
226/****************************************************************************/
227
/*
 * Precompute the HMAC inner and outer hash state for a session key.
 *
 * Runs one hash block over (key XOR ipad) and one over (key XOR opad)
 * and captures the resulting chaining values, so the per-packet
 * routines can seed the hash unit directly instead of re-hashing the
 * padded key for every packet (standard HMAC optimisation, RFC 2104).
 *
 * auth  - non-zero: SHA-1 (20 key bytes used, 3 output words);
 *         zero: MD5 (16 key bytes used, 2 output words)
 * key   - raw HMAC key material
 * inner - out: chaining state after hashing key ^ 0x36..36
 * outer - out: chaining state after hashing key ^ 0x5c..5c
 */
void
octo_calc_hash(__u8 auth, unsigned char *key, uint64_t *inner, uint64_t *outer)
{
    uint8_t hash_key[64];
    uint64_t *key1;
    register uint64_t xor1 = 0x3636363636363636ULL;  /* HMAC ipad pattern */
    register uint64_t xor2 = 0x5c5c5c5c5c5c5c5cULL;  /* HMAC opad pattern */
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    /* zero-pad the key out to one full 64-byte hash input block */
    memset(hash_key, 0, sizeof(hash_key));
    memcpy(hash_key, (uint8_t *) key, (auth ? 20 : 16));
    key1 = (uint64_t *) hash_key;
    flags = octeon_crypto_enable(&state);
    if (auth) {
        /* standard SHA-1 initial chaining values */
        CVMX_MT_HSH_IV(0x67452301EFCDAB89ULL, 0);
        CVMX_MT_HSH_IV(0x98BADCFE10325476ULL, 1);
        CVMX_MT_HSH_IV(0xC3D2E1F000000000ULL, 2);
    } else {
        /* standard MD5 initial chaining values */
        CVMX_MT_HSH_IV(0x0123456789ABCDEFULL, 0);
        CVMX_MT_HSH_IV(0xFEDCBA9876543210ULL, 1);
    }

    /* hash the 64-byte block (key XOR ipad); the eighth word is
     * delivered via START* to run the round */
    CVMX_MT_HSH_DAT((*key1 ^ xor1), 0);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor1), 1);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor1), 2);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor1), 3);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor1), 4);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor1), 5);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor1), 6);
    key1++;
    if (auth)
        CVMX_MT_HSH_STARTSHA((*key1 ^ xor1));
    else
        CVMX_MT_HSH_STARTMD5((*key1 ^ xor1));

    /* capture the inner chaining state */
    CVMX_MF_HSH_IV(inner[0], 0);
    CVMX_MF_HSH_IV(inner[1], 1);
    if (auth) {
        inner[2] = 0;
        CVMX_MF_HSH_IV(((uint64_t *) inner)[2], 2);
    }

    /* repeat with the opad pattern to derive the outer state */
    memset(hash_key, 0, sizeof(hash_key));
    memcpy(hash_key, (uint8_t *) key, (auth ? 20 : 16));
    key1 = (uint64_t *) hash_key;
    if (auth) {
        CVMX_MT_HSH_IV(0x67452301EFCDAB89ULL, 0);
        CVMX_MT_HSH_IV(0x98BADCFE10325476ULL, 1);
        CVMX_MT_HSH_IV(0xC3D2E1F000000000ULL, 2);
    } else {
        CVMX_MT_HSH_IV(0x0123456789ABCDEFULL, 0);
        CVMX_MT_HSH_IV(0xFEDCBA9876543210ULL, 1);
    }

    CVMX_MT_HSH_DAT((*key1 ^ xor2), 0);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor2), 1);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor2), 2);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor2), 3);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor2), 4);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor2), 5);
    key1++;
    CVMX_MT_HSH_DAT((*key1 ^ xor2), 6);
    key1++;
    if (auth)
        CVMX_MT_HSH_STARTSHA((*key1 ^ xor2));
    else
        CVMX_MT_HSH_STARTMD5((*key1 ^ xor2));

    /* capture the outer chaining state */
    CVMX_MF_HSH_IV(outer[0], 0);
    CVMX_MF_HSH_IV(outer[1], 1);
    if (auth) {
        outer[2] = 0;
        CVMX_MF_HSH_IV(outer[2], 2);
    }
    octeon_crypto_disable(&state, flags);
    return;
}
319
320/****************************************************************************/
321/* DES functions */
322
/*
 * 3DES-CBC encrypt (cipher only, no authentication) over a scatterlist,
 * in place.
 *
 * od        - session holding the DES/3DES key (octo_enckey/octo_encklen)
 * sg/sg_len - scatterlist covering the packet, and its total byte length
 * auth_off/auth_len/icv_off - unused for cipher-only sessions
 * crypt_off - byte offset of the region to encrypt; must be 8-byte aligned
 * crypt_len - bytes to encrypt, processed in 8-byte DES blocks
 * ivp       - 8-byte IV
 *
 * Returns 0 on success, -EINVAL on bad parameters or key length.
 */
int
octo_des_cbc_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    uint64_t *data;
    int data_i, data_l;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    if (unlikely(od == NULL || sg==NULL || sg_len==0 || ivp==NULL ||
            (crypt_off & 0x7) || (crypt_off + crypt_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
            "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
            "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
            auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load 3DES key: 24-byte keys use all three parts; 8-byte (single
     * DES) keys replicate K1 so 3DES-EDE degenerates to plain DES */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    if (od->octo_encklen == 24) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    } else if (od->octo_encklen == 8) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }

    CVMX_MT_3DES_IV(* (uint64_t *) ivp);

    /* skip to the start of the region to encrypt, 8 bytes per word */
    while (crypt_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_off -= 8;
    }

    /* encrypt one 8-byte DES block per iteration, writing back in place */
    while (crypt_len > 0) {
        CVMX_MT_3DES_ENC_CBC(*data);
        CVMX_MF_3DES_RESULT(*data);
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_len -= 8;
    }

    octeon_crypto_disable(&state, flags);
    return 0;
}
385
386
/*
 * 3DES-CBC decrypt (cipher only, no authentication) over a scatterlist,
 * in place.  Mirror of octo_des_cbc_encrypt with the decrypt opcode.
 *
 * od        - session holding the DES/3DES key (octo_enckey/octo_encklen)
 * sg/sg_len - scatterlist covering the packet, and its total byte length
 * auth_off/auth_len/icv_off - unused for cipher-only sessions
 * crypt_off - byte offset of the region to decrypt; must be 8-byte aligned
 * crypt_len - bytes to decrypt, processed in 8-byte DES blocks
 * ivp       - 8-byte IV
 *
 * Returns 0 on success, -EINVAL on bad parameters or key length.
 */
int
octo_des_cbc_decrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    uint64_t *data;
    int data_i, data_l;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    if (unlikely(od == NULL || sg==NULL || sg_len==0 || ivp==NULL ||
            (crypt_off & 0x7) || (crypt_off + crypt_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
            "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
            "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
            auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load 3DES key: 24-byte keys use all three parts; 8-byte (single
     * DES) keys replicate K1 so 3DES-EDE degenerates to plain DES */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    if (od->octo_encklen == 24) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    } else if (od->octo_encklen == 8) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }

    CVMX_MT_3DES_IV(* (uint64_t *) ivp);

    /* skip to the start of the region to decrypt, 8 bytes per word */
    while (crypt_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_off -= 8;
    }

    /* decrypt one 8-byte DES block per iteration, writing back in place */
    while (crypt_len > 0) {
        CVMX_MT_3DES_DEC_CBC(*data);
        CVMX_MF_3DES_RESULT(*data);
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_len -= 8;
    }

    octeon_crypto_disable(&state, flags);
    return 0;
}
449
450/****************************************************************************/
451/* AES functions */
452
/*
 * AES-CBC encrypt (cipher only, no authentication) over a scatterlist,
 * in place.
 *
 * od        - session holding the AES key (16/24/32 bytes)
 * sg/sg_len - scatterlist covering the packet, and its total byte length
 * auth_off/auth_len/icv_off - unused for cipher-only sessions
 * crypt_off - byte offset of the region to encrypt; must be 8-byte aligned
 * crypt_len - bytes to encrypt, processed in 16-byte AES blocks
 * ivp       - 16-byte IV
 *
 * Returns 0 on success, -EINVAL on bad parameters or key length.
 */
int
octo_aes_cbc_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    uint64_t *data, *pdata;
    int data_i, data_l;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    if (unlikely(od == NULL || sg==NULL || sg_len==0 || ivp==NULL ||
            (crypt_off & 0x7) || (crypt_off + crypt_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
            "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
            "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
            auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load AES key; unused high words are zeroed for shorter keys */
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

    if (od->octo_encklen == 16) {
        CVMX_MT_AES_KEY(0x0, 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 24) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 32) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }
    /* hardware encodes key length as (bytes / 8) - 1 */
    CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

    CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
    CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

    /* skip to the start of the region to encrypt, 8 bytes per word */
    while (crypt_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_off -= 8;
    }

    /* one 16-byte AES block per iteration: feed two 64-bit words, then
     * read both result words back in place */
    while (crypt_len > 0) {
        pdata = data;            /* remember where the first word lives */
        CVMX_MT_AES_ENC_CBC0(*data);
        SG_CONSUME(sg, data, data_i, data_l);
        CVMX_MT_AES_ENC_CBC1(*data);
        CVMX_MF_AES_RESULT(*pdata, 0);
        CVMX_MF_AES_RESULT(*data, 1);
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_len -= 16;
    }

    octeon_crypto_disable(&state, flags);
    return 0;
}
526
527
/*
 * AES-CBC decrypt (cipher only, no authentication) over a scatterlist,
 * in place.  Mirror of octo_aes_cbc_encrypt with the decrypt opcodes.
 *
 * od        - session holding the AES key (16/24/32 bytes)
 * sg/sg_len - scatterlist covering the packet, and its total byte length
 * auth_off/auth_len/icv_off - unused for cipher-only sessions
 * crypt_off - byte offset of the region to decrypt; must be 8-byte aligned
 * crypt_len - bytes to decrypt, processed in 16-byte AES blocks
 * ivp       - 16-byte IV
 *
 * Returns 0 on success, -EINVAL on bad parameters or key length.
 */
int
octo_aes_cbc_decrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    uint64_t *data, *pdata;
    int data_i, data_l;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    if (unlikely(od == NULL || sg==NULL || sg_len==0 || ivp==NULL ||
            (crypt_off & 0x7) || (crypt_off + crypt_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
            "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
            "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
            auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load AES key; unused high words are zeroed for shorter keys */
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

    if (od->octo_encklen == 16) {
        CVMX_MT_AES_KEY(0x0, 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 24) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 32) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }
    /* hardware encodes key length as (bytes / 8) - 1 */
    CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

    CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
    CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

    /* skip to the start of the region to decrypt, 8 bytes per word */
    while (crypt_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_off -= 8;
    }

    /* one 16-byte AES block per iteration: feed two 64-bit words, then
     * read both result words back in place */
    while (crypt_len > 0) {
        pdata = data;            /* remember where the first word lives */
        CVMX_MT_AES_DEC_CBC0(*data);
        SG_CONSUME(sg, data, data_i, data_l);
        CVMX_MT_AES_DEC_CBC1(*data);
        CVMX_MF_AES_RESULT(*pdata, 0);
        CVMX_MF_AES_RESULT(*data, 1);
        SG_CONSUME(sg, data, data_i, data_l);
        crypt_len -= 16;
    }

    octeon_crypto_disable(&state, flags);
    return 0;
}
601
602/****************************************************************************/
603/* MD5 */
604
/*
 * HMAC-MD5 authentication only (null cipher) over a scatterlist.
 *
 * Hashes auth_len bytes starting at auth_off using the precomputed
 * inner/outer state from octo_calc_hash(), then stores the 12-byte
 * truncated ICV at icv_off.
 *
 * Returns 0 on success, -EINVAL on bad parameters.
 */
int
octo_null_md5_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    uint64_t *data;
    uint64_t tmp1, tmp2;
    int data_i, data_l, alen = auth_len;  /* keep length for final padding */
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    if (unlikely(od == NULL || sg==NULL || sg_len==0 ||
            (auth_off & 0x7) || (auth_off + auth_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
            "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
            "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
            auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data, data_i, data_l);

    flags = octeon_crypto_enable(&state);

    /* seed the hash unit with the precomputed HMAC inner state */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

    /* skip to the authenticated region, 8 bytes per word */
    while (auth_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        auth_off -= 8;
    }

    /* feed the data into the MD5 unit, 8 bytes at a time */
    while (auth_len > 0) {
        CVM_LOAD_MD5_UNIT(*data, next);
        auth_len -= 8;
        SG_CONSUME(sg, data, data_i, data_l);
    }

    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    /* dead code: partial-word tail handling ('inplen' is not defined
     * in this function) */
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *) & tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *) data)[inplen];
        } while (inplen);
        CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
        CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    /* append the mandatory 0x80 padding byte */
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

    /* zero-pad up to the length word, then append the bit count; +64
     * accounts for the ipad block already hashed, and CVMX_ES64 swaps
     * to the little-endian length MD5 expects */
    while (next != 7) {
        CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVMX_ES64(tmp1, ((alen + 64) << 3));
    CVM_LOAD_MD5_UNIT(tmp1, next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);

    /* outer hash: MD5 over opad state || 16-byte inner digest */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);  /* padding after digest */
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_ES64(tmp1, ((64 + 16) << 3));  /* opad block + digest, in bits */
    CVMX_MT_HSH_STARTMD5(tmp1);

    /* store the 12-byte truncated ICV at icv_off (8 + 4 bytes) */
    SG_INIT(sg, data, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        icv_off -= 8;
    }
    CVMX_MF_HSH_IV(*data, 0);
    SG_CONSUME(sg, data, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *(uint32_t *)data = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}
708
709/****************************************************************************/
710/* SHA1 */
711
/*
 * HMAC-SHA1 authentication only (null cipher) over a scatterlist.
 *
 * Hashes auth_len bytes starting at auth_off using the precomputed
 * inner/outer state from octo_calc_hash(), then stores the 12-byte
 * truncated ICV at icv_off.
 *
 * Returns 0 on success, -EINVAL on bad parameters.
 */
int
octo_null_sha1_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    uint64_t *data;
    uint64_t tmp1, tmp2, tmp3;
    int data_i, data_l, alen = auth_len;  /* keep length for final padding */
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    if (unlikely(od == NULL || sg==NULL || sg_len==0 ||
            (auth_off & 0x7) || (auth_off + auth_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
            "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
            "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
            auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data, data_i, data_l);

    flags = octeon_crypto_enable(&state);

    /* seed the hash unit with the precomputed HMAC inner state */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
    CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

    /* skip to the authenticated region, 8 bytes per word */
    while (auth_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        auth_off -= 8;
    }

    /* feed the data into the SHA-1 unit, 8 bytes at a time */
    while (auth_len > 0) {
        CVM_LOAD_SHA_UNIT(*data, next);
        auth_len -= 8;
        SG_CONSUME(sg, data, data_i, data_l);
    }

    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    /* dead code: partial-word tail handling ('inplen' is not defined
     * in this function) */
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *) & tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *) data)[inplen];
        } while (inplen);
        CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
        CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    /* append the mandatory 0x80 padding byte */
    CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

    /* zero-pad up to the final word, then append the big-endian bit
     * count (+64 accounts for the ipad block already hashed) */
    while (next != 7) {
        CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);
    tmp3 = 0;
    CVMX_MF_HSH_IV(tmp3, 2);

    /* outer hash: SHA-1 over opad state || 20-byte inner digest */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
    CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    /* set the 0x80 padding bit immediately after the 20-byte digest */
    tmp3 |= 0x0000000080000000;
    CVMX_MT_HSH_DAT(tmp3, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));  /* opad + digest bits */

    /* store the 12-byte truncated ICV at icv_off (8 + 4 bytes) */
    SG_INIT(sg, data, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data, data_i, data_l);
        icv_off -= 8;
    }
    CVMX_MF_HSH_IV(*data, 0);
    SG_CONSUME(sg, data, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *(uint32_t *)data = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}
818
819/****************************************************************************/
820/* DES MD5 */
821
/*
 * Combined 3DES-CBC encrypt + HMAC-MD5 in a single pass over the
 * scatterlist.
 *
 * The list is walked in 32-bit words (so crypt_off/auth_off only need
 * 4-byte alignment); each loop iteration gathers an 8-byte chunk into
 * the `mydata` union, encrypts it if it lies inside the crypt region,
 * folds it into the hash if it lies inside the auth region, and writes
 * it back.  The 12-byte HMAC-MD5 ICV is stored at icv_off.
 *
 * Returns 0 on success, -EINVAL on bad parameters or key length.
 */
int
octo_des_cbc_md5_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    /* staging buffer: view the stream as two 32-bit words or one
     * 64-bit word regardless of scatterlist segment alignment */
    union {
        uint32_t data32[2];
        uint64_t data64[1];
    } mydata;
    uint64_t *data = &mydata.data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2;
    int data_i, data_l, alen = auth_len;  /* keep length for final padding */
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    if (unlikely(od == NULL || sg==NULL || sg_len==0 || ivp==NULL ||
            (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
            (crypt_len & 0x7) ||
            (auth_len & 0x7) ||
            (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
            "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
            "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
            auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    flags = octeon_crypto_enable(&state);

    /* load 3DES key: 24-byte keys use all three parts; 8-byte (single
     * DES) keys replicate K1 so 3DES-EDE degenerates to plain DES */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    if (od->octo_encklen == 24) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    } else if (od->octo_encklen == 8) {
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
        CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }

    CVMX_MT_3DES_IV(* (uint64_t *) ivp);

    /* seed the hash unit with the precomputed HMAC inner state */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

    /* skip words preceding both regions; any remaining offset is then
     * consumed 8 bytes at a time inside the main loop */
    while (crypt_off > 0 && auth_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        crypt_off -= 4;
        auth_off -= 4;
    }

    /* main loop: gather 8 bytes, encrypt and/or hash as required, then
     * scatter the (possibly encrypted) bytes back */
    while (crypt_len > 0 || auth_len > 0) {
        uint32_t *first = data32;
        mydata.data32[0] = *first;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata.data32[1] = *data32;
        if (crypt_off <= 0) {
            if (crypt_len > 0) {
                CVMX_MT_3DES_ENC_CBC(*data);
                CVMX_MF_3DES_RESULT(*data);
                crypt_len -= 8;
            }
        } else
            crypt_off -= 8;
        if (auth_off <= 0) {
            if (auth_len > 0) {
                /* encrypt-then-mac: the hash sees the ciphertext when
                 * the regions overlap */
                CVM_LOAD_MD5_UNIT(*data, next);
                auth_len -= 8;
            }
        } else
            auth_off -= 8;
        *first = mydata.data32[0];
        *data32 = mydata.data32[1];
        SG_CONSUME(sg, data32, data_i, data_l);
    }

    /* finish the hash */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    /* dead code: partial-word tail handling ('inplen' is not defined
     * in this function) */
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *) & tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *) data)[inplen];
        } while (inplen);
        CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
        CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    /* append the mandatory 0x80 padding byte */
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

    /* zero-pad up to the length word, then append the bit count; +64
     * accounts for the ipad block already hashed, and CVMX_ES64 swaps
     * to the little-endian length MD5 expects */
    while (next != 7) {
        CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVMX_ES64(tmp1, ((alen + 64) << 3));
    CVM_LOAD_MD5_UNIT(tmp1, next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);

    /* outer hash: MD5 over opad state || 16-byte inner digest */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);  /* padding after digest */
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_ES64(tmp1, ((64 + 16) << 3));  /* opad block + digest, in bits */
    CVMX_MT_HSH_STARTMD5(tmp1);

    /* store the 12-byte truncated ICV at icv_off, 4 bytes at a time */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}
975
/*
 * octo_des_cbc_md5_decrypt() - single-pass DES/3DES-CBC decrypt with
 * HMAC-MD5 over the ciphertext, using the Octeon COP2 crypto engine.
 *
 * The scatterlist @sg is walked once in 32-bit words; each 8-byte unit is
 * fed to the MD5 unit (while it still holds ciphertext) and/or the 3DES
 * unit according to the auth/crypt windows.  The 96-bit (12-byte)
 * truncated HMAC is written back into the scatterlist at @icv_off.
 *
 * @od:        session (keys, inner/outer HMAC state); octo_encklen must be
 *             24 (3DES) or 8 (single DES, key replicated into K2/K3)
 * @sg/@sg_len: packet data and its total length in bytes
 * @auth_off/@auth_len: HMAC window; 4-byte aligned offset, 8-byte multiple len
 * @crypt_off/@crypt_len: cipher window; same alignment rules
 * @icv_off:   where the 12-byte ICV is stored
 *             (NOTE(review): icv_off is not range-checked against sg_len —
 *             assumed validated by the caller)
 * @ivp:       8-byte CBC IV (NOTE(review): cast to uint64_t*, so it is
 *             assumed 8-byte aligned — confirm at call sites)
 *
 * Returns 0 on success, -EINVAL on bad parameters or key length.
 */
int
octo_des_cbc_md5_decrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    /* union lets one 8-byte unit be read/written as two 32-bit words */
    union {
    uint32_t data32[2];
    uint64_t data64[1];
    } mydata;
    uint64_t *data = &mydata.data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2;
    /* alen keeps the original auth_len for the final MD5 length block */
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    /* reject misaligned offsets, non-8-byte-multiple lengths, overruns */
    if (unlikely(od == NULL || sg==NULL || sg_len==0 || ivp==NULL ||
        (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
        (crypt_len & 0x7) ||
        (auth_len & 0x7) ||
        (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
    dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
        "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
        "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
        auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
    return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    /* claim the COP2 unit; must be released on every exit path below */
    flags = octeon_crypto_enable(&state);

    /* load 3DES Key */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    if (od->octo_encklen == 24) {
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    } else if (od->octo_encklen == 8) {
    /* single DES: replicate the one key into all three key slots */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
    } else {
    octeon_crypto_disable(&state, flags);
    dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
    return -EINVAL;
    }

    CVMX_MT_3DES_IV(* (uint64_t *) ivp);

    /* Load MD5 IV (precomputed inner-pad state from session setup) */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

    /* skip the common prefix before either window opens, a word at a time */
    while (crypt_off > 0 && auth_off > 0) {
    SG_CONSUME(sg, data32, data_i, data_l);
    crypt_off -= 4;
    auth_off -= 4;
    }

    /*
     * Main loop: one 8-byte unit per iteration.  Hash first (so the MD5
     * covers the ciphertext as read), then decrypt in place, then write
     * both words back to the scatterlist.
     */
    while (crypt_len > 0 || auth_len > 0) {
        uint32_t *first = data32;
    mydata.data32[0] = *first;
    SG_CONSUME(sg, data32, data_i, data_l);
    mydata.data32[1] = *data32;
        if (auth_off <= 0) {
        if (auth_len > 0) {
        CVM_LOAD_MD5_UNIT(*data, next);
        auth_len -= 8;
        }
    } else
        auth_off -= 8;
        if (crypt_off <= 0) {
        if (crypt_len > 0) {
        CVMX_MT_3DES_DEC_CBC(*data);
        CVMX_MF_3DES_RESULT(*data);
        crypt_len -= 8;
        }
    } else
        crypt_off -= 8;
    *first = mydata.data32[0];
    *data32 = mydata.data32[1];
    SG_CONSUME(sg, data32, data_i, data_l);
    }

    /* finish the hash: append the 0x80 padding byte (auth_len is always
     * a multiple of 8 here, so a full padding word is used) */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
    uint64_t tmp = 0;
    uint8_t *p = (uint8_t *) & tmp;
    p[inplen] = 0x80;
    do {
        inplen--;
        p[inplen] = ((uint8_t *) data)[inplen];
    } while (inplen);
    CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash: zero-fill until only the length word remains */
    while (next != 7) {
    CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
    }
    /* MD5 length is little-endian, hence the CVMX_ES64 byte swap;
     * +64 accounts for the key ipad block already hashed into hminner */
    CVMX_ES64(tmp1, ((alen + 64) << 3));
    CVM_LOAD_MD5_UNIT(tmp1, next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);

    /* Initialize hash unit with the precomputed outer-pad state */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

    /* outer block = 16-byte inner digest + 0x80 pad + zeros + bit length */
    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_ES64(tmp1, ((64 + 16) << 3));
    CVMX_MT_HSH_STARTMD5(tmp1);

    /* save the HMAC: re-walk the scatterlist to icv_off and store the
     * first 12 bytes (96-bit truncation) of the outer digest */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
    SG_CONSUME(sg, data32, data_i, data_l);
    icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}
1129
1130/****************************************************************************/
1131/* DES SHA */
1132
/*
 * octo_des_cbc_sha1_encrypt() - single-pass DES/3DES-CBC encrypt with
 * HMAC-SHA1 over the ciphertext (encrypt-then-MAC), on the Octeon COP2
 * crypto engine.
 *
 * Each 8-byte unit in the crypt window is CBC-encrypted first, then fed
 * to the SHA1 unit, so the MAC covers ciphertext.  The 96-bit truncated
 * HMAC is stored into the scatterlist at @icv_off.
 *
 * @od:        session; octo_encklen must be 24 (3DES) or 8 (single DES)
 * @sg/@sg_len: packet data; offsets 4-byte aligned, lengths 8-byte multiples
 * @icv_off:   ICV location (NOTE(review): not range-checked vs sg_len)
 * @ivp:       8-byte CBC IV (assumed 8-byte aligned — cast to uint64_t*)
 *
 * Returns 0 on success, -EINVAL on bad parameters or key length.
 */
int
octo_des_cbc_sha1_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    /* one 8-byte unit, addressable as two 32-bit words */
    union {
    uint32_t data32[2];
    uint64_t data64[1];
    } mydata;
    uint64_t *data = &mydata.data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2, tmp3;
    /* alen preserves the original auth_len for the SHA1 length block */
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    if (unlikely(od == NULL || sg==NULL || sg_len==0 || ivp==NULL ||
        (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
        (crypt_len & 0x7) ||
        (auth_len & 0x7) ||
        (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
    dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
        "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
        "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
        auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
    return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    /* claim COP2; released on every exit path */
    flags = octeon_crypto_enable(&state);

    /* load 3DES Key */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    if (od->octo_encklen == 24) {
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    } else if (od->octo_encklen == 8) {
    /* single DES: same key in all three slots */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
    } else {
    octeon_crypto_disable(&state, flags);
    dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
    return -EINVAL;
    }

    CVMX_MT_3DES_IV(* (uint64_t *) ivp);

    /* Load SHA1 IV (precomputed inner-pad state, 3 x 64 bits) */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
    CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

    /* skip the common prefix before either window opens */
    while (crypt_off > 0 && auth_off > 0) {
    SG_CONSUME(sg, data32, data_i, data_l);
    crypt_off -= 4;
    auth_off -= 4;
    }

    /* main loop: encrypt the unit first, then hash the result */
    while (crypt_len > 0 || auth_len > 0) {
        uint32_t *first = data32;
    mydata.data32[0] = *first;
    SG_CONSUME(sg, data32, data_i, data_l);
    mydata.data32[1] = *data32;
        if (crypt_off <= 0) {
        if (crypt_len > 0) {
        CVMX_MT_3DES_ENC_CBC(*data);
        CVMX_MF_3DES_RESULT(*data);
        crypt_len -= 8;
        }
    } else
        crypt_off -= 8;
        if (auth_off <= 0) {
        if (auth_len > 0) {
        CVM_LOAD_SHA_UNIT(*data, next);
        auth_len -= 8;
        }
    } else
        auth_off -= 8;
    *first = mydata.data32[0];
    *data32 = mydata.data32[1];
    SG_CONSUME(sg, data32, data_i, data_l);
    }

    /* finish the hash: 0x80 padding (auth_len is a multiple of 8) */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
    uint64_t tmp = 0;
    uint8_t *p = (uint8_t *) & tmp;
    p[inplen] = 0x80;
    do {
        inplen--;
        p[inplen] = ((uint8_t *) data)[inplen];
    } while (inplen);
    CVM_LOAD_SHA_UNIT(tmp, next);
    } else {
    CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash: zero-fill until the length word slot */
    while (next != 7) {
    CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
    }
    /* SHA1 length is big-endian: no byte swap needed;
     * +64 accounts for the ipad block already in hminner */
    CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

    /* Get the inner hash of HMAC (20 bytes: tmp1, tmp2, high half of tmp3) */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);
    tmp3 = 0;
    CVMX_MF_HSH_IV(tmp3, 2);

    /* Initialize hash unit with the precomputed outer-pad state */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
    CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

    /* outer block: 20-byte inner digest, 0x80 pad packed into the low
     * half of tmp3, zeros, then bit length (64 + 20 bytes) */
    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    tmp3 |= 0x0000000080000000;
    CVMX_MT_HSH_DAT(tmp3, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));

    /* save the HMAC: first 12 bytes (96-bit truncation) at icv_off */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
    SG_CONSUME(sg, data32, data_i, data_l);
    icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}
1289
/*
 * octo_des_cbc_sha1_decrypt() - single-pass DES/3DES-CBC decrypt with
 * HMAC-SHA1 over the ciphertext, on the Octeon COP2 crypto engine.
 *
 * Mirror of octo_des_cbc_sha1_encrypt(): each 8-byte unit is hashed
 * while it still holds ciphertext, then CBC-decrypted.  The 96-bit
 * truncated HMAC is written at @icv_off for the caller to compare.
 *
 * @od:        session; octo_encklen must be 24 (3DES) or 8 (single DES)
 * @sg/@sg_len: packet data; offsets 4-byte aligned, lengths 8-byte multiples
 * @icv_off:   ICV location (NOTE(review): not range-checked vs sg_len)
 * @ivp:       8-byte CBC IV (assumed 8-byte aligned — cast to uint64_t*)
 *
 * Returns 0 on success, -EINVAL on bad parameters or key length.
 */
int
octo_des_cbc_sha1_decrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    /* one 8-byte unit, addressable as two 32-bit words */
    union {
    uint32_t data32[2];
    uint64_t data64[1];
    } mydata;
    uint64_t *data = &mydata.data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2, tmp3;
    /* alen preserves the original auth_len for the SHA1 length block */
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    if (unlikely(od == NULL || sg==NULL || sg_len==0 || ivp==NULL ||
        (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
        (crypt_len & 0x7) ||
        (auth_len & 0x7) ||
        (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
    dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
        "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
        "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
        auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
    return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    /* claim COP2; released on every exit path */
    flags = octeon_crypto_enable(&state);

    /* load 3DES Key */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    if (od->octo_encklen == 24) {
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    } else if (od->octo_encklen == 8) {
    /* single DES: same key in all three slots */
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
    CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
    } else {
    octeon_crypto_disable(&state, flags);
    dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
    return -EINVAL;
    }

    CVMX_MT_3DES_IV(* (uint64_t *) ivp);

    /* Load SHA1 IV (precomputed inner-pad state) */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
    CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

    /* skip the common prefix before either window opens */
    while (crypt_off > 0 && auth_off > 0) {
    SG_CONSUME(sg, data32, data_i, data_l);
    crypt_off -= 4;
    auth_off -= 4;
    }

    /* main loop: hash the ciphertext unit first, then decrypt it */
    while (crypt_len > 0 || auth_len > 0) {
        uint32_t *first = data32;
    mydata.data32[0] = *first;
    SG_CONSUME(sg, data32, data_i, data_l);
    mydata.data32[1] = *data32;
        if (auth_off <= 0) {
        if (auth_len > 0) {
        CVM_LOAD_SHA_UNIT(*data, next);
        auth_len -= 8;
        }
    } else
        auth_off -= 8;
        if (crypt_off <= 0) {
        if (crypt_len > 0) {
        CVMX_MT_3DES_DEC_CBC(*data);
        CVMX_MF_3DES_RESULT(*data);
        crypt_len -= 8;
        }
    } else
        crypt_off -= 8;
    *first = mydata.data32[0];
    *data32 = mydata.data32[1];
    SG_CONSUME(sg, data32, data_i, data_l);
    }

    /* finish the hash: 0x80 padding (auth_len is a multiple of 8) */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
    uint64_t tmp = 0;
    uint8_t *p = (uint8_t *) & tmp;
    p[inplen] = 0x80;
    do {
        inplen--;
        p[inplen] = ((uint8_t *) data)[inplen];
    } while (inplen);
    CVM_LOAD_SHA_UNIT(tmp, next);
    } else {
    CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash: zero-fill until the length word slot */
    while (next != 7) {
    CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
    }
    /* SHA1 length is big-endian; +64 covers the ipad block in hminner */
    CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

    /* Get the inner hash of HMAC (20 bytes) */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);
    tmp3 = 0;
    CVMX_MF_HSH_IV(tmp3, 2);

    /* Initialize hash unit with the precomputed outer-pad state */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
    CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

    /* outer block: inner digest + 0x80 pad in low half of tmp3 + length */
    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    tmp3 |= 0x0000000080000000;
    CVMX_MT_HSH_DAT(tmp3, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));
    /* save the HMAC: first 12 bytes (96-bit truncation) at icv_off */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
    SG_CONSUME(sg, data32, data_i, data_l);
    icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}
1445
1446/****************************************************************************/
1447/* AES MD5 */
1448
/*
 * octo_aes_cbc_md5_encrypt() - single-pass AES-CBC encrypt with HMAC-MD5
 * over the ciphertext (encrypt-then-MAC), on the Octeon COP2 engine.
 *
 * The main loop processes one 16-byte AES block (two 8-byte units) per
 * iteration; separate pre/post loops hash 8-byte units that fall in the
 * auth window but outside the crypt window.  The 96-bit truncated HMAC
 * is stored at @icv_off.
 *
 * @od:        session; octo_encklen must be 16, 24 or 32 bytes
 * @sg/@sg_len: packet; offsets 4-byte aligned, lengths 8-byte multiples
 *             (NOTE(review): validation only enforces 8-byte multiples,
 *             but the crypt loop consumes 16 bytes per pass — callers are
 *             assumed to pass a 16-byte-multiple crypt_len)
 * @icv_off:   ICV location (NOTE(review): not range-checked vs sg_len)
 * @ivp:       16-byte CBC IV (assumed 8-byte aligned — cast to uint64_t*)
 *
 * Returns 0 on success, -EINVAL on bad parameters or key length.
 */
int
octo_aes_cbc_md5_encrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    /* two 8-byte units (one AES block), each addressable as 32-bit words */
    union {
    uint32_t data32[2];
    uint64_t data64[1];
    } mydata[2];
    uint64_t *pdata = &mydata[0].data64[0];
    uint64_t *data = &mydata[1].data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2;
    /* alen preserves the original auth_len for the MD5 length block */
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    if (unlikely(od == NULL || sg==NULL || sg_len==0 || ivp==NULL ||
        (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
        (crypt_len & 0x7) ||
        (auth_len & 0x7) ||
        (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
    dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
        "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
        "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
        auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
    return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    /* claim COP2; released on every exit path */
    flags = octeon_crypto_enable(&state);

    /* load AES Key (unused high slots are zeroed for short keys) */
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

    if (od->octo_encklen == 16) {
    CVMX_MT_AES_KEY(0x0, 2);
    CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 24) {
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 32) {
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
    } else {
    octeon_crypto_disable(&state, flags);
    dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
    return -EINVAL;
    }
    /* hardware encodes key length as (bytes/8 - 1): 1, 2 or 3 */
    CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

    CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
    CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

    /* Load MD5 IV (precomputed inner-pad state) */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

    /* skip the common prefix before either window opens */
    while (crypt_off > 0 && auth_off > 0) {
    SG_CONSUME(sg, data32, data_i, data_l);
    crypt_off -= 4;
    auth_off -= 4;
    }

    /* align auth and crypt: hash-only 8-byte units that precede the
     * crypt window (e.g. the ESP header) */
    while (crypt_off > 0 && auth_len > 0) {
    mydata[0].data32[0] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);
    mydata[0].data32[1] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVM_LOAD_MD5_UNIT(*pdata, next);
    crypt_off -= 8;
    auth_len -= 8;
    }

    /* main loop: read a 16-byte block, encrypt it, hash the ciphertext,
     * write it back through the saved word pointers */
    while (crypt_len > 0) {
        uint32_t *pdata32[3];

    pdata32[0] = data32;
    mydata[0].data32[0] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);

    pdata32[1] = data32;
    mydata[0].data32[1] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);

    pdata32[2] = data32;
    mydata[1].data32[0] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);

    mydata[1].data32[1] = *data32;

    CVMX_MT_AES_ENC_CBC0(*pdata);
    CVMX_MT_AES_ENC_CBC1(*data);
    CVMX_MF_AES_RESULT(*pdata, 0);
    CVMX_MF_AES_RESULT(*data, 1);
    crypt_len -= 16;

    if (auth_len > 0) {
        CVM_LOAD_MD5_UNIT(*pdata, next);
        auth_len -= 8;
    }
    if (auth_len > 0) {
        CVM_LOAD_MD5_UNIT(*data, next);
        auth_len -= 8;
    }

    *pdata32[0] = mydata[0].data32[0];
    *pdata32[1] = mydata[0].data32[1];
    *pdata32[2] = mydata[1].data32[0];
    *data32 = mydata[1].data32[1];

    SG_CONSUME(sg, data32, data_i, data_l);
    }

    /* finish any left over hashing (auth window extends past crypt) */
    while (auth_len > 0) {
    mydata[0].data32[0] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);
    mydata[0].data32[1] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVM_LOAD_MD5_UNIT(*pdata, next);
    auth_len -= 8;
    }

    /* finish the hash: 0x80 padding (auth_len is a multiple of 8) */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
    uint64_t tmp = 0;
    uint8_t *p = (uint8_t *) & tmp;
    p[inplen] = 0x80;
    do {
        inplen--;
        p[inplen] = ((uint8_t *) data)[inplen];
    } while (inplen);
    CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash: zero-fill until the length word slot */
    while (next != 7) {
    CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
    }
    /* MD5 length is little-endian, hence CVMX_ES64; +64 is the ipad block */
    CVMX_ES64(tmp1, ((alen + 64) << 3));
    CVM_LOAD_MD5_UNIT(tmp1, next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);

    /* Initialize hash unit with the precomputed outer-pad state */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

    /* outer block: 16-byte inner digest + 0x80 pad + zeros + bit length */
    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_ES64(tmp1, ((64 + 16) << 3));
    CVMX_MT_HSH_STARTMD5(tmp1);

    /* save the HMAC: first 12 bytes (96-bit truncation) at icv_off */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
    SG_CONSUME(sg, data32, data_i, data_l);
    icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}
1646
/*
 * octo_aes_cbc_md5_decrypt() - single-pass AES-CBC decrypt with HMAC-MD5
 * over the ciphertext, on the Octeon COP2 engine.
 *
 * Mirror of octo_aes_cbc_md5_encrypt(): in the main loop each 16-byte
 * block is hashed while it still holds ciphertext, then decrypted.  The
 * 96-bit truncated HMAC is written at @icv_off for the caller to verify.
 *
 * @od:        session; octo_encklen must be 16, 24 or 32 bytes
 * @sg/@sg_len: packet; offsets 4-byte aligned, lengths 8-byte multiples
 *             (NOTE(review): the crypt loop consumes 16 bytes per pass —
 *             crypt_len is assumed to be a 16-byte multiple)
 * @icv_off:   ICV location (NOTE(review): not range-checked vs sg_len)
 * @ivp:       16-byte CBC IV (assumed 8-byte aligned — cast to uint64_t*)
 *
 * Returns 0 on success, -EINVAL on bad parameters or key length.
 */
int
octo_aes_cbc_md5_decrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    /* two 8-byte units (one AES block), each addressable as 32-bit words */
    union {
    uint32_t data32[2];
    uint64_t data64[1];
    } mydata[2];
    uint64_t *pdata = &mydata[0].data64[0];
    uint64_t *data = &mydata[1].data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2;
    /* alen preserves the original auth_len for the MD5 length block */
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s()\n", __FUNCTION__);

    if (unlikely(od == NULL || sg==NULL || sg_len==0 || ivp==NULL ||
        (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
        (crypt_len & 0x7) ||
        (auth_len & 0x7) ||
        (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
    dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
        "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
        "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
        auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
    return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    /* claim COP2; released on every exit path */
    flags = octeon_crypto_enable(&state);

    /* load AES Key (unused high slots are zeroed for short keys) */
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

    if (od->octo_encklen == 16) {
    CVMX_MT_AES_KEY(0x0, 2);
    CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 24) {
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 32) {
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
    } else {
    octeon_crypto_disable(&state, flags);
    dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
    return -EINVAL;
    }
    /* hardware encodes key length as (bytes/8 - 1): 1, 2 or 3 */
    CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

    CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
    CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

    /* Load MD5 IV (precomputed inner-pad state) */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

    /* skip the common prefix before either window opens */
    while (crypt_off > 0 && auth_off > 0) {
    SG_CONSUME(sg, data32, data_i, data_l);
    crypt_off -= 4;
    auth_off -= 4;
    }

    /* align auth and crypt: hash-only 8-byte units before the crypt window */
    while (crypt_off > 0 && auth_len > 0) {
    mydata[0].data32[0] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);
    mydata[0].data32[1] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVM_LOAD_MD5_UNIT(*pdata, next);
    crypt_off -= 8;
    auth_len -= 8;
    }

    /* main loop: read a 16-byte block, hash the ciphertext, then decrypt
     * and write the plaintext back through the saved word pointers */
    while (crypt_len > 0) {
        uint32_t *pdata32[3];

    pdata32[0] = data32;
    mydata[0].data32[0] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);
    pdata32[1] = data32;
    mydata[0].data32[1] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);
    pdata32[2] = data32;
    mydata[1].data32[0] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);
    mydata[1].data32[1] = *data32;

    if (auth_len > 0) {
        CVM_LOAD_MD5_UNIT(*pdata, next);
        auth_len -= 8;
    }

    if (auth_len > 0) {
        CVM_LOAD_MD5_UNIT(*data, next);
        auth_len -= 8;
    }

    CVMX_MT_AES_DEC_CBC0(*pdata);
    CVMX_MT_AES_DEC_CBC1(*data);
    CVMX_MF_AES_RESULT(*pdata, 0);
    CVMX_MF_AES_RESULT(*data, 1);
    crypt_len -= 16;

    *pdata32[0] = mydata[0].data32[0];
    *pdata32[1] = mydata[0].data32[1];
    *pdata32[2] = mydata[1].data32[0];
    *data32 = mydata[1].data32[1];

    SG_CONSUME(sg, data32, data_i, data_l);
    }

    /* finish left over hash if any (auth window extends past crypt) */
    while (auth_len > 0) {
    mydata[0].data32[0] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);
    mydata[0].data32[1] = *data32;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVM_LOAD_MD5_UNIT(*pdata, next);
    auth_len -= 8;
    }


    /* finish the hash: 0x80 padding (auth_len is a multiple of 8) */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
    uint64_t tmp = 0;
    uint8_t *p = (uint8_t *) & tmp;
    p[inplen] = 0x80;
    do {
        inplen--;
        p[inplen] = ((uint8_t *) data)[inplen];
    } while (inplen);
    CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash: zero-fill until the length word slot */
    while (next != 7) {
    CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
    }
    /* MD5 length is little-endian, hence CVMX_ES64; +64 is the ipad block */
    CVMX_ES64(tmp1, ((alen + 64) << 3));
    CVM_LOAD_MD5_UNIT(tmp1, next);

    /* Get the inner hash of HMAC */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);

    /* Initialize hash unit with the precomputed outer-pad state */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

    /* outer block: 16-byte inner digest + 0x80 pad + zeros + bit length */
    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_ES64(tmp1, ((64 + 16) << 3));
    CVMX_MT_HSH_STARTMD5(tmp1);

    /* save the HMAC: first 12 bytes (96-bit truncation) at icv_off */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
    SG_CONSUME(sg, data32, data_i, data_l);
    icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    octeon_crypto_disable(&state, flags);
    return 0;
}
1843
1844/****************************************************************************/
1845/* AES SHA1 */
1846
1847int
1848octo_aes_cbc_sha1_encrypt(
1849    struct octo_sess *od,
1850    struct scatterlist *sg, int sg_len,
1851    int auth_off, int auth_len,
1852    int crypt_off, int crypt_len,
1853    int icv_off, uint8_t *ivp)
1854{
1855    register int next = 0;
1856    union {
1857    uint32_t data32[2];
1858    uint64_t data64[1];
1859    } mydata[2];
1860    uint64_t *pdata = &mydata[0].data64[0];
1861    uint64_t *data = &mydata[1].data64[0];
1862    uint32_t *data32;
1863    uint64_t tmp1, tmp2, tmp3;
1864    int data_i, data_l, alen = auth_len;
1865    struct octeon_cop2_state state;
1866    unsigned long flags;
1867
1868    dprintk("%s(a_off=%d a_len=%d c_off=%d c_len=%d icv_off=%d)\n",
1869            __FUNCTION__, auth_off, auth_len, crypt_off, crypt_len, icv_off);
1870
1871    if (unlikely(od == NULL || sg==NULL || sg_len==0 || ivp==NULL ||
1872        (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
1873        (crypt_len & 0x7) ||
1874        (auth_len & 0x7) ||
1875        (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
1876    dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
1877        "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
1878        "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
1879        auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
1880    return -EINVAL;
1881    }
1882
1883    SG_INIT(sg, data32, data_i, data_l);
1884
1885    CVMX_PREFETCH0(ivp);
1886    CVMX_PREFETCH0(od->octo_enckey);
1887
1888    flags = octeon_crypto_enable(&state);
1889
1890    /* load AES Key */
1891    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
1892    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
1893
1894    if (od->octo_encklen == 16) {
1895    CVMX_MT_AES_KEY(0x0, 2);
1896    CVMX_MT_AES_KEY(0x0, 3);
1897    } else if (od->octo_encklen == 24) {
1898    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
1899    CVMX_MT_AES_KEY(0x0, 3);
1900    } else if (od->octo_encklen == 32) {
1901    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
1902    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
1903    } else {
1904    octeon_crypto_disable(&state, flags);
1905    dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
1906    return -EINVAL;
1907    }
1908    CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);
1909
1910    CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
1911    CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);
1912
1913    /* Load SHA IV */
1914    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
1915    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
1916    CVMX_MT_HSH_IV(od->octo_hminner[2], 2);
1917
1918    while (crypt_off > 0 && auth_off > 0) {
1919    SG_CONSUME(sg, data32, data_i, data_l);
1920    crypt_off -= 4;
1921    auth_off -= 4;
1922    }
1923
1924    /* align auth and crypt */
1925    while (crypt_off > 0 && auth_len > 0) {
1926    mydata[0].data32[0] = *data32;
1927    SG_CONSUME(sg, data32, data_i, data_l);
1928    mydata[0].data32[1] = *data32;
1929    SG_CONSUME(sg, data32, data_i, data_l);
1930    CVM_LOAD_SHA_UNIT(*pdata, next);
1931    crypt_off -= 8;
1932    auth_len -= 8;
1933    }
1934
1935    while (crypt_len > 0) {
1936        uint32_t *pdata32[3];
1937
1938    pdata32[0] = data32;
1939    mydata[0].data32[0] = *data32;
1940    SG_CONSUME(sg, data32, data_i, data_l);
1941    pdata32[1] = data32;
1942    mydata[0].data32[1] = *data32;
1943    SG_CONSUME(sg, data32, data_i, data_l);
1944    pdata32[2] = data32;
1945    mydata[1].data32[0] = *data32;
1946    SG_CONSUME(sg, data32, data_i, data_l);
1947    mydata[1].data32[1] = *data32;
1948
1949    CVMX_MT_AES_ENC_CBC0(*pdata);
1950    CVMX_MT_AES_ENC_CBC1(*data);
1951    CVMX_MF_AES_RESULT(*pdata, 0);
1952    CVMX_MF_AES_RESULT(*data, 1);
1953    crypt_len -= 16;
1954
1955    if (auth_len > 0) {
1956        CVM_LOAD_SHA_UNIT(*pdata, next);
1957        auth_len -= 8;
1958    }
1959    if (auth_len > 0) {
1960        CVM_LOAD_SHA_UNIT(*data, next);
1961        auth_len -= 8;
1962    }
1963
1964    *pdata32[0] = mydata[0].data32[0];
1965    *pdata32[1] = mydata[0].data32[1];
1966    *pdata32[2] = mydata[1].data32[0];
1967    *data32 = mydata[1].data32[1];
1968
1969    SG_CONSUME(sg, data32, data_i, data_l);
1970    }
1971
1972    /* finish and hashing */
1973    while (auth_len > 0) {
1974    mydata[0].data32[0] = *data32;
1975    SG_CONSUME(sg, data32, data_i, data_l);
1976    mydata[0].data32[1] = *data32;
1977    SG_CONSUME(sg, data32, data_i, data_l);
1978    CVM_LOAD_SHA_UNIT(*pdata, next);
1979    auth_len -= 8;
1980    }
1981
1982    /* finish the hash */
1983    CVMX_PREFETCH0(od->octo_hmouter);
1984#if 0
1985    if (unlikely(inplen)) {
1986    uint64_t tmp = 0;
1987    uint8_t *p = (uint8_t *) & tmp;
1988    p[inplen] = 0x80;
1989    do {
1990        inplen--;
1991        p[inplen] = ((uint8_t *) data)[inplen];
1992    } while (inplen);
1993    CVM_LOAD_SHA_UNIT(tmp, next);
1994    } else {
1995    CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
1996    }
1997#else
1998    CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
1999#endif
2000
2001    /* Finish Inner hash */
2002    while (next != 7) {
2003    CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
2004    }
2005    CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);
2006
2007    /* Get the inner hash of HMAC */
2008    CVMX_MF_HSH_IV(tmp1, 0);
2009    CVMX_MF_HSH_IV(tmp2, 1);
2010    tmp3 = 0;
2011    CVMX_MF_HSH_IV(tmp3, 2);
2012
2013    /* Initialize hash unit */
2014    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
2015    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
2016    CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);
2017
2018    CVMX_MT_HSH_DAT(tmp1, 0);
2019    CVMX_MT_HSH_DAT(tmp2, 1);
2020    tmp3 |= 0x0000000080000000;
2021    CVMX_MT_HSH_DAT(tmp3, 2);
2022    CVMX_MT_HSH_DATZ(3);
2023    CVMX_MT_HSH_DATZ(4);
2024    CVMX_MT_HSH_DATZ(5);
2025    CVMX_MT_HSH_DATZ(6);
2026    CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));
2027
2028    /* finish the hash */
2029    CVMX_PREFETCH0(od->octo_hmouter);
2030#if 0
2031    if (unlikely(inplen)) {
2032    uint64_t tmp = 0;
2033    uint8_t *p = (uint8_t *) & tmp;
2034    p[inplen] = 0x80;
2035    do {
2036        inplen--;
2037        p[inplen] = ((uint8_t *) data)[inplen];
2038    } while (inplen);
2039    CVM_LOAD_MD5_UNIT(tmp, next);
2040    } else {
2041    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
2042    }
2043#else
2044    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
2045#endif
2046
2047    /* save the HMAC */
2048    SG_INIT(sg, data32, data_i, data_l);
2049    while (icv_off > 0) {
2050    SG_CONSUME(sg, data32, data_i, data_l);
2051    icv_off -= 4;
2052    }
2053    CVMX_MF_HSH_IV(tmp1, 0);
2054    *data32 = (uint32_t) (tmp1 >> 32);
2055    SG_CONSUME(sg, data32, data_i, data_l);
2056    *data32 = (uint32_t) tmp1;
2057    SG_CONSUME(sg, data32, data_i, data_l);
2058    CVMX_MF_HSH_IV(tmp1, 1);
2059    *data32 = (uint32_t) (tmp1 >> 32);
2060
2061    octeon_crypto_disable(&state, flags);
2062    return 0;
2063}
2064
/*
 * octo_aes_cbc_sha1_decrypt() - AES-CBC decrypt plus HMAC-SHA1 authentication
 * of a scatterlist, using the Octeon COP2 crypto coprocessor (AES and HSH
 * units driven via CVMX_MT_*/CVMX_MF_* register moves).
 *
 * The SHA unit is fed each 16-byte block *before* it is overwritten by the
 * AES decrypt result, i.e. the MAC is computed over the ciphertext
 * (encrypt-then-MAC ordering, as used by IPsec ESP inbound processing).
 *
 * @od:         session state: AES key (octo_enckey/octo_encklen) and the
 *              precomputed HMAC inner/outer chaining values
 *              (octo_hminner/octo_hmouter)
 * @sg,@sg_len: packet data, walked 32 bits at a time via SG_INIT/SG_CONSUME
 * @auth_off,@auth_len:   region to authenticate (4-byte aligned offset,
 *                        8-byte multiple length)
 * @crypt_off,@crypt_len: region to decrypt (4-byte aligned offset, length a
 *                        multiple of 8; consumed 16 bytes per AES block)
 * @icv_off:    offset at which the 12-byte (96-bit truncated) HMAC-SHA1
 *              value is written back into the scatterlist
 * @ivp:        16-byte AES-CBC IV
 *
 * Returns 0 on success, -EINVAL on bad parameters or unsupported key length.
 *
 * NOTE(review): the computed ICV is *written* at icv_off, not compared with
 * a received ICV — presumably the caller performs the comparison; confirm
 * against the OCF dispatch code.
 */
int
octo_aes_cbc_sha1_decrypt(
    struct octo_sess *od,
    struct scatterlist *sg, int sg_len,
    int auth_off, int auth_len,
    int crypt_off, int crypt_len,
    int icv_off, uint8_t *ivp)
{
    register int next = 0;
    /*
     * Two 64-bit staging words, addressable as 32-bit halves so data can be
     * gathered from / scattered back to 4-byte-aligned scatterlist chunks
     * while the coprocessor works on full 64-bit quantities.
     */
    union {
        uint32_t data32[2];
        uint64_t data64[1];
    } mydata[2];
    uint64_t *pdata = &mydata[0].data64[0];
    uint64_t *data = &mydata[1].data64[0];
    uint32_t *data32;
    uint64_t tmp1, tmp2, tmp3;
    /* alen keeps the original auth_len for the final SHA1 length word */
    int data_i, data_l, alen = auth_len;
    struct octeon_cop2_state state;
    unsigned long flags;

    dprintk("%s(a_off=%d a_len=%d c_off=%d c_len=%d icv_off=%d)\n",
            __FUNCTION__, auth_off, auth_len, crypt_off, crypt_len, icv_off);

    /*
     * Offsets must be 4-byte aligned, lengths 8-byte multiples, and both
     * regions must lie entirely within the scatterlist.
     */
    if (unlikely(od == NULL || sg==NULL || sg_len==0 || ivp==NULL ||
        (crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
        (crypt_len & 0x7) ||
        (auth_len & 0x7) ||
        (auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
        dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
            "auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
            "icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
            auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
        return -EINVAL;
    }

    SG_INIT(sg, data32, data_i, data_l);

    CVMX_PREFETCH0(ivp);
    CVMX_PREFETCH0(od->octo_enckey);

    /* claim COP2; state is saved so the kernel context can be restored */
    flags = octeon_crypto_enable(&state);

    /* load AES Key */
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
    CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

    /* AES-128/192/256: unused key registers are zeroed */
    if (od->octo_encklen == 16) {
        CVMX_MT_AES_KEY(0x0, 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 24) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(0x0, 3);
    } else if (od->octo_encklen == 32) {
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
        CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
    } else {
        octeon_crypto_disable(&state, flags);
        dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
        return -EINVAL;
    }
    /* key length is encoded as (number of 64-bit words) - 1 */
    CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

    CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
    CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

    /* Load SHA1 IV: precomputed inner state = SHA1(key XOR ipad) */
    CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
    CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
    CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

    /* skip data that precedes both the auth and crypt regions */
    while (crypt_off > 0 && auth_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        crypt_off -= 4;
        auth_off -= 4;
    }

    /* align auth and crypt: hash-only data before the encrypted region
     * (e.g. the ESP header), 8 bytes per SHA unit load */
    while (crypt_off > 0 && auth_len > 0) {
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        CVM_LOAD_SHA_UNIT(*pdata, next);
        crypt_off -= 8;
        auth_len -= 8;
    }

    /* main loop: one 16-byte AES block per iteration */
    while (crypt_len > 0) {
        /* remember the first three output slots; the fourth is data32
         * itself, which is only advanced after writing back */
        uint32_t *pdata32[3];

        pdata32[0] = data32;
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        pdata32[1] = data32;
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        pdata32[2] = data32;
        mydata[1].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[1].data32[1] = *data32;

        /* hash the ciphertext BEFORE it is overwritten by the decrypt */
        if (auth_len > 0) {
            CVM_LOAD_SHA_UNIT(*pdata, next);
            auth_len -= 8;
        }
        if (auth_len > 0) {
            CVM_LOAD_SHA_UNIT(*data, next);
            auth_len -= 8;
        }

        /* decrypt in place: results land back in mydata[] */
        CVMX_MT_AES_DEC_CBC0(*pdata);
        CVMX_MT_AES_DEC_CBC1(*data);
        CVMX_MF_AES_RESULT(*pdata, 0);
        CVMX_MF_AES_RESULT(*data, 1);
        crypt_len -= 16;

        /* scatter the plaintext back into the saved slots */
        *pdata32[0] = mydata[0].data32[0];
        *pdata32[1] = mydata[0].data32[1];
        *pdata32[2] = mydata[1].data32[0];
        *data32 = mydata[1].data32[1];

        SG_CONSUME(sg, data32, data_i, data_l);
    }

    /* finish and leftover hashing: auth region extending past the
     * encrypted region (e.g. trailing authenticated-only data) */
    while (auth_len > 0) {
        mydata[0].data32[0] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        mydata[0].data32[1] = *data32;
        SG_CONSUME(sg, data32, data_i, data_l);
        CVM_LOAD_SHA_UNIT(*pdata, next);
        auth_len -= 8;
    }

    /* finish the hash: append the mandatory 0x80 padding byte.
     * NOTE(review): the #if 0 branch references 'inplen', which is not
     * defined in this function — dead code retained as-is. */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *) & tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *) data)[inplen];
        } while (inplen);
        CVM_LOAD_SHA_UNIT(tmp, next);
    } else {
        CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

    /* Finish Inner hash: zero-fill up to the final 64-bit word, then load
     * the total bit length — (auth bytes + 64-byte ipad block) * 8 */
    while (next != 7) {
        CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
    }
    CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

    /* Get the inner hash of HMAC (160 bits across IV regs 0-2) */
    CVMX_MF_HSH_IV(tmp1, 0);
    CVMX_MF_HSH_IV(tmp2, 1);
    tmp3 = 0;
    CVMX_MF_HSH_IV(tmp3, 2);

    /* Initialize hash unit with the precomputed outer state
     * SHA1(key XOR opad) */
    CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
    CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
    CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

    /* outer block: 20-byte inner digest, 0x80 pad bit appended right after
     * it in the low half of tmp3, zeros, then the bit length
     * (64-byte opad block + 20-byte digest) * 8 */
    CVMX_MT_HSH_DAT(tmp1, 0);
    CVMX_MT_HSH_DAT(tmp2, 1);
    tmp3 |= 0x0000000080000000;
    CVMX_MT_HSH_DAT(tmp3, 2);
    CVMX_MT_HSH_DATZ(3);
    CVMX_MT_HSH_DATZ(4);
    CVMX_MT_HSH_DATZ(5);
    CVMX_MT_HSH_DATZ(6);
    CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));

    /* finish the hash.
     * NOTE(review): CVM_LOAD_MD5_UNIT in a SHA1 path looks like a
     * copy-paste leftover from the MD5 variant; the outer hash was already
     * completed by STARTSHA above, so this appears to only load a data
     * word without starting another hash — confirm against the
     * CVM_LOAD_MD5_UNIT macro definition before removing. */
    CVMX_PREFETCH0(od->octo_hmouter);
#if 0
    if (unlikely(inplen)) {
        uint64_t tmp = 0;
        uint8_t *p = (uint8_t *) & tmp;
        p[inplen] = 0x80;
        do {
            inplen--;
            p[inplen] = ((uint8_t *) data)[inplen];
        } while (inplen);
        CVM_LOAD_MD5_UNIT(tmp, next);
    } else {
        CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
    }
#else
    CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

    /* save the HMAC: rewind the scatterlist, seek to icv_off, and write
     * the first 12 bytes (96-bit truncation) of the outer digest */
    SG_INIT(sg, data32, data_i, data_l);
    while (icv_off > 0) {
        SG_CONSUME(sg, data32, data_i, data_l);
        icv_off -= 4;
    }
    CVMX_MF_HSH_IV(tmp1, 0);
    *data32 = (uint32_t) (tmp1 >> 32);
    SG_CONSUME(sg, data32, data_i, data_l);
    *data32 = (uint32_t) tmp1;
    SG_CONSUME(sg, data32, data_i, data_l);
    CVMX_MF_HSH_IV(tmp1, 1);
    *data32 = (uint32_t) (tmp1 >> 32);

    /* release COP2 and restore the saved coprocessor state */
    octeon_crypto_disable(&state, flags);
    return 0;
}
2282
2283/****************************************************************************/
2284
