sys/mips/nlm/hal/nlmsaelib.h

/*-
 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
 *
 * Copyright (c) 2003-2012 Broadcom Corporation
 * All Rights Reserved
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY BROADCOM ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL BROADCOM OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _NLM_HAL_CRYPTO_H_
#define _NLM_HAL_CRYPTO_H_

#define SAE_CFG_REG             0x00
#define SAE_ENG_SEL_0           0x01
#define SAE_ENG_SEL_1           0x02
#define SAE_ENG_SEL_2           0x03
#define SAE_ENG_SEL_3           0x04
#define SAE_ENG_SEL_4           0x05
#define SAE_ENG_SEL_5           0x06
#define SAE_ENG_SEL_6           0x07
#define SAE_ENG_SEL_7           0x08

#define RSA_CFG_REG             0x00
#define RSA_ENG_SEL_0           0x01
#define RSA_ENG_SEL_1           0x02
#define RSA_ENG_SEL_2           0x03

#define nlm_read_sec_reg(b, r)          nlm_read_reg(b, r)
#define nlm_write_sec_reg(b, r, v)      nlm_write_reg(b, r, v)
#define nlm_get_sec_pcibase(node)       nlm_pcicfg_base(XLP_IO_SEC_OFFSET(node))
#define nlm_get_sec_regbase(node)        \
                        (nlm_get_sec_pcibase(node) + XLP_IO_PCI_HDRSZ)

#define nlm_read_rsa_reg(b, r)          nlm_read_reg(b, r)
#define nlm_write_rsa_reg(b, r, v)      nlm_write_reg(b, r, v)
#define nlm_get_rsa_pcibase(node)       nlm_pcicfg_base(XLP_IO_RSA_OFFSET(node))
#define nlm_get_rsa_regbase(node)        \
                        (nlm_get_rsa_pcibase(node) + XLP_IO_PCI_HDRSZ)

#define nlm_pcibase_sec(node)     nlm_pcicfg_base(XLP_IO_SEC_OFFSET(node))
#define nlm_qidstart_sec(node)    nlm_qidstart_kseg(nlm_pcibase_sec(node))
#define nlm_qnum_sec(node)        nlm_qnum_kseg(nlm_pcibase_sec(node))

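/*
 * Illustrative sketch (not part of the original header): reading the SAE
 * configuration register of a given node through the accessor macros
 * above.  The function name is hypothetical, and the underlying
 * nlm_read_reg()/nlm_pcicfg_base() helpers are assumed to be provided by
 * the XLP HAL headers that are included before this one.
 */
static __inline__ uint32_t
nlm_sae_example_read_cfg(int node)
{
        uint64_t secbase;

        secbase = nlm_get_sec_regbase(node);
        return (nlm_read_sec_reg(secbase, SAE_CFG_REG));
}
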
/*
 * Buffer allocation for crypto in the kernel is done with malloc, so each
 * segment size is limited to the page size (4K by default).
 */
#define NLM_CRYPTO_MAX_SEG_LEN  PAGE_SIZE

#define MAX_KEY_LEN_IN_DW               20

/* shift a field into place; numofbits is documentation only (no masking) */
#define left_shift64(x, bitshift, numofbits)                    \
    ((uint64_t)(x) << (bitshift))

/* mask a field to numofbits, then shift it into place */
#define left_shift64_mask(x, bitshift, numofbits)                       \
    (((uint64_t)(x) & ((1ULL << (numofbits)) - 1)) << (bitshift))

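/*
 * Illustrative worked example (not part of the original header): how the
 * shift helpers pack bit fields into a 64-bit descriptor word.  Packing
 * the (too wide) value 0x1ff into an 8-bit field at bit 52 with the
 * masked variant keeps only the low 8 bits:
 *
 *      left_shift64_mask(0x1ff, 52, 8) == (uint64_t)0xff << 52
 *
 * whereas left_shift64(0x1ff, 52, 8) would shift the full value and let
 * it spill into the neighbouring field at bit 60.
 */
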
/**
* @brief cipher algorithms
* @ingroup crypto
*/
enum nlm_cipher_algo {
        NLM_CIPHER_BYPASS = 0,
        NLM_CIPHER_DES = 1,
        NLM_CIPHER_3DES = 2,
        NLM_CIPHER_AES128 = 3,
        NLM_CIPHER_AES192 = 4,
        NLM_CIPHER_AES256 = 5,
        NLM_CIPHER_ARC4 = 6,
        NLM_CIPHER_KASUMI_F8 = 7,
        NLM_CIPHER_SNOW3G_F8 = 8,
        NLM_CIPHER_CAMELLIA128 = 9,
        NLM_CIPHER_CAMELLIA192 = 0xA,
        NLM_CIPHER_CAMELLIA256 = 0xB,
        NLM_CIPHER_MAX = 0xC,
};

/**
* @brief cipher modes
* @ingroup crypto
*/
enum nlm_cipher_mode {
        NLM_CIPHER_MODE_ECB = 0,
        NLM_CIPHER_MODE_CBC = 1,
        NLM_CIPHER_MODE_CFB = 2,
        NLM_CIPHER_MODE_OFB = 3,
        NLM_CIPHER_MODE_CTR = 4,
        NLM_CIPHER_MODE_AES_F8 = 5,
        NLM_CIPHER_MODE_GCM = 6,
        NLM_CIPHER_MODE_CCM = 7,
        NLM_CIPHER_MODE_UNDEFINED1 = 8,
        NLM_CIPHER_MODE_UNDEFINED2 = 9,
        NLM_CIPHER_MODE_LRW = 0xA,
        NLM_CIPHER_MODE_XTS = 0xB,
        NLM_CIPHER_MODE_MAX = 0xC,
};

/**
* @brief hash algorithms
* @ingroup crypto
*/
enum nlm_hash_algo {
        NLM_HASH_BYPASS = 0,
        NLM_HASH_MD5 = 1,
        NLM_HASH_SHA = 2,
        NLM_HASH_UNDEFINED = 3,
        NLM_HASH_AES128 = 4,
        NLM_HASH_AES192 = 5,
        NLM_HASH_AES256 = 6,
        NLM_HASH_KASUMI_F9 = 7,
        NLM_HASH_SNOW3G_F9 = 8,
        NLM_HASH_CAMELLIA128 = 9,
        NLM_HASH_CAMELLIA192 = 0xA,
        NLM_HASH_CAMELLIA256 = 0xB,
        NLM_HASH_GHASH = 0xC,
        NLM_HASH_MAX = 0xD
};

/**
* @brief hash modes
* @ingroup crypto
*/
enum nlm_hash_mode {
        NLM_HASH_MODE_SHA1 = 0, /* Only SHA */
        NLM_HASH_MODE_SHA224 = 1,       /* Only SHA */
        NLM_HASH_MODE_SHA256 = 2,       /* Only SHA */
        NLM_HASH_MODE_SHA384 = 3,       /* Only SHA */
        NLM_HASH_MODE_SHA512 = 4,       /* Only SHA */
        NLM_HASH_MODE_CMAC = 5, /* AES and Camellia */
        NLM_HASH_MODE_XCBC = 6, /* AES and Camellia */
        NLM_HASH_MODE_CBC_MAC = 7,      /* AES and Camellia */
        NLM_HASH_MODE_CCM = 8,  /* AES */
        NLM_HASH_MODE_GCM = 9,  /* AES */
        NLM_HASH_MODE_MAX = 0xA,
};

/**
* @brief crypto control descriptor, should be cache aligned
* @ingroup crypto
*/
struct nlm_crypto_pkt_ctrl {
        uint64_t desc0;
        /* combination of cipher and hash keys */
        uint64_t key[MAX_KEY_LEN_IN_DW];
        uint32_t cipherkeylen;
        uint32_t hashkeylen;
        uint32_t taglen;
};

/**
* @brief crypto packet descriptor, should be cache aligned
* @ingroup crypto
*/
struct nlm_crypto_pkt_param {
        uint64_t desc0;
        uint64_t desc1;
        uint64_t desc2;
        uint64_t desc3;
        uint64_t segment[1][2];
};

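/*
 * Illustrative sketch (not part of the original header): segment[1][2]
 * above is used as a variable-length tail, one (source, destination)
 * descriptor pair per segment (see nlm_crypto_fill_src_seg() and
 * nlm_crypto_fill_dst_seg() below).  A caller allocating a packet
 * descriptor for "nseg" segments would therefore size it roughly as
 * shown here; the helper name is hypothetical and the cache-line
 * alignment requirement comes from the struct comments above.
 */
static __inline__ size_t
nlm_crypto_example_pkt_param_size(unsigned int nseg)
{
        /* 4 fixed descriptor words plus 2 words per segment */
        return (sizeof(struct nlm_crypto_pkt_param) +
            (nseg > 1 ? (nseg - 1) * 2 * sizeof(uint64_t) : 0));
}
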
static __inline__ uint64_t
nlm_crypto_form_rsa_ecc_fmn_entry0(unsigned int l3alloc, unsigned int type,
    unsigned int func, uint64_t srcaddr)
{
        return (left_shift64(l3alloc, 61, 1) |
            left_shift64(type, 46, 7) |
            left_shift64(func, 40, 6) |
            left_shift64(srcaddr, 0, 40));
}

static __inline__ uint64_t
nlm_crypto_form_rsa_ecc_fmn_entry1(unsigned int dstclobber,
    unsigned int l3alloc, unsigned int fbvc, uint64_t dstaddr)
{
        return (left_shift64(dstclobber, 62, 1) |
            left_shift64(l3alloc, 61, 1) |
            left_shift64(fbvc, 40, 12) |
            left_shift64(dstaddr, 0, 40));
}

/**
* @brief Generate crypto control descriptor
* @ingroup crypto
* hmac : 1 for hash with hmac
* hashalg : see hash_alg enums
* hashmode : see hash_mode enums
* cipheralg : see cipher_alg enums
* ciphermode : see cipher_mode enums
* arc4_cipherkeylen : length of arc4 cipher key, 0 is interpreted as 32
* arc4_keyinit :
* cfbmask : cipher text for feedback,
*           0 (1 bit), 1 (2 bits), 2 (4 bits), 3 (8 bits), 4 (16 bits),
*           5 (32 bits), 6 (64 bits), 7 (128 bits)
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_ctrl_desc(unsigned int hmac, unsigned int hashalg,
    unsigned int hashmode, unsigned int cipheralg, unsigned int ciphermode,
    unsigned int arc4_cipherkeylen, unsigned int arc4_keyinit,
    unsigned int cfbmask)
{
        return (left_shift64(hmac, 61, 1) |
            left_shift64(hashalg, 52, 8) |
            left_shift64(hashmode, 43, 8) |
            left_shift64(cipheralg, 34, 8) |
            left_shift64(ciphermode, 25, 8) |
            left_shift64(arc4_cipherkeylen, 18, 5) |
            left_shift64(arc4_keyinit, 17, 1) |
            left_shift64(cfbmask, 0, 3));
}
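
/*
 * Illustrative sketch (not part of the original header): building the
 * control descriptor word for AES-128 in CBC mode with HMAC-SHA1
 * authentication, using the enums defined above.  The arc4 and cfb
 * fields are left at 0 since they do not apply here; the function name
 * is hypothetical.
 */
static __inline__ uint64_t
nlm_crypto_example_aes_cbc_hmac_sha1_ctrl_desc(void)
{
        return (nlm_crypto_form_pkt_ctrl_desc(1 /* hmac */,
            NLM_HASH_SHA, NLM_HASH_MODE_SHA1,
            NLM_CIPHER_AES128, NLM_CIPHER_MODE_CBC,
            0 /* arc4_cipherkeylen */, 0 /* arc4_keyinit */,
            0 /* cfbmask */));
}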

/**
* @brief Generate crypto packet descriptor 0
* @ingroup crypto
* tls : 1 (tls enabled) 0 (tls disabled)
* hash_source : 1 (encrypted data is sent to the auth engine)
*               0 (plain data is sent to the auth engine)
* hashout_l3alloc : 1 (auth output is transited through l3 cache)
* encrypt : 1 (for encrypt) 0 (for decrypt)
* ivlen : iv length in bytes
* hashdst_addr : hash out physical address, byte aligned
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_desc0(unsigned int tls, unsigned int hash_source,
    unsigned int hashout_l3alloc, unsigned int encrypt, unsigned int ivlen,
    uint64_t hashdst_addr)
{
        return (left_shift64(tls, 63, 1) |
            left_shift64(hash_source, 62, 1) |
            left_shift64(hashout_l3alloc, 60, 1) |
            left_shift64(encrypt, 59, 1) |
            left_shift64_mask((ivlen - 1), 41, 16) |
            left_shift64(hashdst_addr, 0, 40));
}

/**
* @brief Generate crypto packet descriptor 1
* @ingroup crypto
* cipherlen : cipher length in bytes
* hashlen : hash length in bytes
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_desc1(unsigned int cipherlen, unsigned int hashlen)
{
        return (left_shift64_mask((cipherlen - 1), 32, 32) |
            left_shift64_mask((hashlen - 1), 0, 32));
}

/**
* @brief Generate crypto packet descriptor 2
* @ingroup crypto
* ivoff : iv offset, offset from start of src data addr
* cipherbit_cnt : number of valid bits in the last input byte to the cipher,
*                0 (8 bits), 1 (1 bit)..7 (7 bits)
* cipheroff : cipher offset, offset from start of src data addr
* hashbit_cnt : number of valid bits in the last input byte to the auth
*              0 (8 bits), 1 (1 bit)..7 (7 bits)
* hashclobber : 1 (hash output will be written as multiples of cachelines, no
*              read modify write)
* hashoff : hash offset, offset from start of src data addr
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_desc2(unsigned int ivoff, unsigned int cipherbit_cnt,
    unsigned int cipheroff, unsigned int hashbit_cnt, unsigned int hashclobber,
    unsigned int hashoff)
{
        return (left_shift64(ivoff, 45, 16) |
            left_shift64(cipherbit_cnt, 42, 3) |
            left_shift64(cipheroff, 22, 16) |
            left_shift64(hashbit_cnt, 19, 3) |
            left_shift64(hashclobber, 18, 1) |
            left_shift64(hashoff, 0, 16));
}

/**
* @brief Generate crypto packet descriptor 3
* @ingroup crypto
* designer_vc : designer freeback fmn destination id
* taglen : length in bits of the tag generated by the auth engine
*          md5 (128 bits), sha1 (160), sha224 (224), sha384 (384),
*          sha512 (512), Kasumi (32), snow3g (32), gcm (128)
* arc4_state_save_l3 : 1 (arc4 state save is transited through l3 cache)
* arc4_save_state : 1 (save the arc4 state after the operation)
* hmacpad : 1 if hmac padding is already done
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_desc3(unsigned int designer_vc, unsigned int taglen,
    unsigned int arc4_state_save_l3, unsigned int arc4_save_state,
    unsigned int hmacpad)
{
        return (left_shift64(designer_vc, 48, 16) |
            left_shift64(taglen, 11, 16) |
            left_shift64(arc4_state_save_l3, 8, 1) |
            left_shift64(arc4_save_state, 6, 1) |
            left_shift64(hmacpad, 5, 1));
}

/**
* @brief Generate crypto packet descriptor 4
* @ingroup crypto
* srcfraglen : length of the source fragment (header + data + tail) in bytes
* srcfragaddr : physical address of the source fragment
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_desc4(unsigned int srcfraglen,
    uint64_t srcfragaddr)
{
        return (left_shift64_mask((srcfraglen - 1), 48, 16) |
            left_shift64(srcfragaddr, 0, 40));
}

/**
* @brief Generate crypto packet descriptor 5
* @ingroup crypto
* dstfraglen : length of the dst fragment (header + data + tail) in bytes
* cipherout_l3alloc : 1 (cipher output is transited through l3 cache)
* cipherclobber : 1 (cipher output will be written as multiples of cachelines,
*                 no read modify write)
* cipherdst_addr : physical address of the cipher destination address
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_desc5(unsigned int dstfraglen,
    unsigned int cipherout_l3alloc, unsigned int cipherclobber,
    uint64_t cipherdst_addr)
{
        return (left_shift64_mask((dstfraglen - 1), 48, 16) |
            left_shift64(cipherout_l3alloc, 46, 1) |
            left_shift64(cipherclobber, 41, 1) |
            left_shift64(cipherdst_addr, 0, 40));
}

/**
* @brief Generate crypto packet fmn message entry 0
* @ingroup crypto
* freeback_vc : freeback response destination address
* designer_fblen : designer freeback length, 1 - 4
* designerdesc_valid : designer desc valid or not
* cipher_keylen : cipher key length in bytes
* cntldesc_addr : physical address of the control descriptor
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_fmn_entry0(unsigned int freeback_vc,
    unsigned int designer_fblen, unsigned int designerdesc_valid,
    unsigned int cipher_keylen, uint64_t cntldesc_addr)
{
        return (left_shift64(freeback_vc, 48, 16) |
            left_shift64_mask(designer_fblen - 1, 46, 2) |
            left_shift64(designerdesc_valid, 45, 1) |
            left_shift64_mask(((cipher_keylen + 7) >> 3), 40, 5) |
            left_shift64(cntldesc_addr >> 6, 0, 34));
}

/**
* @brief Generate crypto packet fmn message entry 1
* @ingroup crypto
* arc4load_state : 1 if load state required 0 otherwise
* hash_keylen : hash key length in bytes
* pktdesc_size : packet descriptor size in bytes
* pktdesc_addr : physical address of the packet descriptor
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_fmn_entry1(unsigned int arc4load_state,
    unsigned int hash_keylen, unsigned int pktdesc_size,
    uint64_t pktdesc_addr)
{
        return (left_shift64(arc4load_state, 63, 1) |
            left_shift64_mask(((hash_keylen + 7) >> 3), 56, 5) |
            left_shift64_mask(((pktdesc_size >> 4) - 1), 43, 12) |
            left_shift64(pktdesc_addr >> 6, 0, 34));
}

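/*
 * Illustrative sketch (not part of the original header): assembling the
 * two-word FMN message that hands a prepared control descriptor and
 * packet descriptor to the security engine.  "fbvc" (the freeback
 * virtual channel) and "pktdesc_size" are values a driver would supply;
 * vtophys() is used here the same way the fill helpers below use it.
 * The function name is hypothetical.
 */
static __inline__ void
nlm_crypto_example_form_pkt_msg(struct nlm_crypto_pkt_ctrl *ctrl,
    struct nlm_crypto_pkt_param *param, unsigned int fbvc,
    unsigned int pktdesc_size, uint64_t msg[2])
{
        msg[0] = nlm_crypto_form_pkt_fmn_entry0(fbvc, 1 /* fblen */,
            0 /* no designer desc */, ctrl->cipherkeylen, vtophys(ctrl));
        msg[1] = nlm_crypto_form_pkt_fmn_entry1(0 /* no arc4 state load */,
            ctrl->hashkeylen, pktdesc_size, vtophys(param));
}
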
static __inline__ int
nlm_crypto_get_hklen_taglen(enum nlm_hash_algo hashalg,
    enum nlm_hash_mode hashmode, unsigned int *taglen, unsigned int *hklen)
{
        if (hashalg == NLM_HASH_MD5) {
                *taglen = 128;
                *hklen  = 64;
        } else if (hashalg == NLM_HASH_SHA) {
                switch (hashmode) {
                case NLM_HASH_MODE_SHA1:
                        *taglen = 160;
                        *hklen  = 64;
                        break;
                case NLM_HASH_MODE_SHA224:
                        *taglen = 224;
                        *hklen  = 64;
                        break;
                case NLM_HASH_MODE_SHA256:
                        *taglen = 256;
                        *hklen  = 64;
                        break;
                case NLM_HASH_MODE_SHA384:
                        *taglen = 384;
                        *hklen  = 128;
                        break;
                case NLM_HASH_MODE_SHA512:
                        *taglen = 512;
                        *hklen  = 128;
                        break;
                default:
                        printf("Error: invalid shaid (%s)\n", __func__);
                        return (-1);
                }
        } else if (hashalg == NLM_HASH_KASUMI_F9) {
                *taglen = 32;
                *hklen  = 0;
        } else if (hashalg == NLM_HASH_SNOW3G_F9) {
                *taglen = 32;
                *hklen  = 0;
        } else if (hashmode == NLM_HASH_MODE_XCBC) {
                *taglen = 128;
                *hklen  = 0;
        } else if (hashmode == NLM_HASH_MODE_GCM) {
                *taglen = 128;
                *hklen  = 0;
        } else if (hashalg == NLM_HASH_BYPASS) {
                *taglen = 0;
                *hklen  = 0;
        } else {
                printf("Error: hash alg/mode not found\n");
                return (-1);
        }

        /* TODO : Add remaining cases */
        return (0);
}

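/*
 * Illustrative sketch (not part of the original header): querying the
 * lengths for SHA-256.  As the table in the function above suggests, the
 * tag length is reported in bits (256 here) while the hash key length
 * matches the HMAC block size in bytes (64 here).  The function name is
 * hypothetical.
 */
static __inline__ int
nlm_crypto_example_sha256_lengths(unsigned int *taglen_bits,
    unsigned int *hklen_bytes)
{
        return (nlm_crypto_get_hklen_taglen(NLM_HASH_SHA,
            NLM_HASH_MODE_SHA256, taglen_bits, hklen_bytes));
}
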
/**
* @brief Fill the crypto control structure
* @ingroup crypto
* hmac : 1 for hash with hmac
* hashalg : see above, hash_alg enums
* hashmode : see above, hash_mode enums
* cipheralg : see above, cipher_alg enums
* ciphermode : see above, cipher_mode enums
* cipherkey, cipherkeylen : cipher key and its length in bytes
* hashkey, hashkeylen : hash key and its length in bytes
*/
static __inline__ int
nlm_crypto_fill_pkt_ctrl(struct nlm_crypto_pkt_ctrl *ctrl, unsigned int hmac,
    enum nlm_hash_algo hashalg, enum nlm_hash_mode hashmode,
    enum nlm_cipher_algo cipheralg, enum nlm_cipher_mode ciphermode,
    unsigned char *cipherkey, unsigned int cipherkeylen,
    unsigned char *hashkey, unsigned int hashkeylen)
{
        unsigned int taglen = 0, hklen = 0;

        ctrl->desc0 = nlm_crypto_form_pkt_ctrl_desc(hmac, hashalg, hashmode,
            cipheralg, ciphermode, 0, 0, 0);
        memset(ctrl->key, 0, sizeof(ctrl->key));
        if (cipherkey)
                memcpy(ctrl->key, cipherkey, cipherkeylen);
        if (hashkey)
                memcpy((unsigned char *)&ctrl->key[(cipherkeylen + 7) / 8],
                            hashkey, hashkeylen);
        if (nlm_crypto_get_hklen_taglen(hashalg, hashmode, &taglen, &hklen)
            < 0)
                return (-1);

        ctrl->cipherkeylen = cipherkeylen;
        ctrl->hashkeylen = hklen;
        ctrl->taglen = taglen;

        /* TODO : add the invalid checks and return error */
        return (0);
}

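/*
 * Illustrative sketch (not part of the original header): filling a
 * control structure for AES-128-CBC with HMAC-SHA1.  "aeskey" and
 * "authkey" are hypothetical caller-provided buffers; 16 bytes is the
 * AES-128 key size, and 64 bytes of HMAC key material matches the SHA-1
 * block size used by the length table above.
 */
static __inline__ int
nlm_crypto_example_fill_ctrl(struct nlm_crypto_pkt_ctrl *ctrl,
    unsigned char *aeskey, unsigned char *authkey)
{
        return (nlm_crypto_fill_pkt_ctrl(ctrl, 1 /* hmac */,
            NLM_HASH_SHA, NLM_HASH_MODE_SHA1,
            NLM_CIPHER_AES128, NLM_CIPHER_MODE_CBC,
            aeskey, 16, authkey, 64));
}
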
/**
* @brief Top level function for generating pkt desc 0 to 3 for cipher and auth
* @ingroup crypto
* ctrl : pointer to the control structure
* param : pointer to the param structure
* encrypt : 1 (for encrypt) 0 (for decrypt)
* hash_source : 1 (encrypted data is sent to the auth engine) 0 (plain data is
*               sent to the auth engine)
* ivoff : iv offset from start of data
* ivlen : iv length in bytes
* hashoff : hash offset from start of data
* hashlen : hash length in bytes
* hmacpad : hmac padding required or not, 1 if already padded
* cipheroff : cipher offset from start of data
* cipherlen : cipher length in bytes
* hashdst_addr : hash destination kernel virtual address (converted to a
*                physical address with vtophys)
*/
static __inline__ void
nlm_crypto_fill_cipher_auth_pkt_param(struct nlm_crypto_pkt_ctrl *ctrl,
    struct nlm_crypto_pkt_param *param, unsigned int encrypt,
    unsigned int hash_source, unsigned int ivoff, unsigned int ivlen,
    unsigned int hashoff, unsigned int hashlen, unsigned int hmacpad,
    unsigned int cipheroff, unsigned int cipherlen, unsigned char *hashdst_addr)
{
        param->desc0 = nlm_crypto_form_pkt_desc0(0, hash_source, 1, encrypt,
                           ivlen, vtophys(hashdst_addr));
        param->desc1 = nlm_crypto_form_pkt_desc1(cipherlen, hashlen);
        param->desc2 = nlm_crypto_form_pkt_desc2(ivoff, 0, cipheroff, 0, 0,
                           hashoff);
        param->desc3 = nlm_crypto_form_pkt_desc3(0, ctrl->taglen, 0, 0,
                           hmacpad);
}

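/*
 * Illustrative sketch (not part of the original header): typical use of
 * nlm_crypto_fill_cipher_auth_pkt_param() for an encrypt-then-auth
 * operation where the buffer layout is [IV][payload] and the MAC is
 * computed over the ciphertext.  All names, offsets and lengths are
 * hypothetical and in bytes, as documented above.
 */
static __inline__ void
nlm_crypto_example_fill_encrypt_auth(struct nlm_crypto_pkt_ctrl *ctrl,
    struct nlm_crypto_pkt_param *param, unsigned int ivlen,
    unsigned int payloadlen, unsigned char *hashdst)
{
        nlm_crypto_fill_cipher_auth_pkt_param(ctrl, param,
            1 /* encrypt */, 1 /* hash ciphertext */,
            0 /* ivoff */, ivlen,
            ivlen /* hashoff */, payloadlen /* hashlen */,
            0 /* hmacpad */,
            ivlen /* cipheroff */, payloadlen /* cipherlen */,
            hashdst);
}
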
/**
* @brief Top level function for generating pkt desc 0 to 3 for a cipher-only
* operation
* @ingroup crypto
* ctrl : pointer to the control structure
* param : pointer to the param structure
* encrypt : 1 (for encrypt) 0 (for decrypt)
* ivoff : iv offset from start of data
* ivlen : iv length in bytes
* cipheroff : cipher offset from start of data
* cipherlen : cipher length in bytes
*/
static __inline__ void
nlm_crypto_fill_cipher_pkt_param(struct nlm_crypto_pkt_ctrl *ctrl,
    struct nlm_crypto_pkt_param *param, unsigned int encrypt,
    unsigned int ivoff, unsigned int ivlen, unsigned int cipheroff,
    unsigned int cipherlen)
{
        param->desc0 = nlm_crypto_form_pkt_desc0(0, 0, 0, encrypt, ivlen, 0ULL);
        param->desc1 = nlm_crypto_form_pkt_desc1(cipherlen, 1);
        param->desc2 = nlm_crypto_form_pkt_desc2(ivoff, 0, cipheroff, 0, 0, 0);
        param->desc3 = nlm_crypto_form_pkt_desc3(0, ctrl->taglen, 0, 0, 0);
}

/**
* @brief Top level function for generating pkt desc 0 to 3 for an auth-only
* operation
* @ingroup crypto
* ctrl : pointer to the control structure
* param : pointer to the param structure
* hashoff : hash offset from start of data
* hashlen : hash length in bytes
* hmacpad : hmac padding required or not, 1 if already padded
* hashdst_addr : hash destination kernel virtual address (converted to a
*                physical address with vtophys)
*/
static __inline__ void
nlm_crypto_fill_auth_pkt_param(struct nlm_crypto_pkt_ctrl *ctrl,
    struct nlm_crypto_pkt_param *param, unsigned int hashoff,
    unsigned int hashlen, unsigned int hmacpad, unsigned char *hashdst_addr)
{
        param->desc0 = nlm_crypto_form_pkt_desc0(0, 0, 1, 0, 1,
                           vtophys(hashdst_addr));
        param->desc1 = nlm_crypto_form_pkt_desc1(1, hashlen);
        param->desc2 = nlm_crypto_form_pkt_desc2(0, 0, 0, 0, 0, hashoff);
        param->desc3 = nlm_crypto_form_pkt_desc3(0, ctrl->taglen, 0, 0,
                           hmacpad);
}

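/*
 * Illustrative sketch (not part of the original header): an auth-only
 * request that MACs "len" bytes starting at the beginning of the source
 * data and writes the tag (ctrl->taglen bits) to "hashdst".  Names are
 * hypothetical; compare with nlm_crypto_fill_auth_pkt_param() above.
 */
static __inline__ void
nlm_crypto_example_fill_auth(struct nlm_crypto_pkt_ctrl *ctrl,
    struct nlm_crypto_pkt_param *param, unsigned int len,
    unsigned char *hashdst)
{
        nlm_crypto_fill_auth_pkt_param(ctrl, param, 0 /* hashoff */,
            len, 0 /* hmacpad */, hashdst);
}
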
/**
* @brief Fill source segment entries (pkt desc 4), splitting the input
* buffer into NLM_CRYPTO_MAX_SEG_LEN sized fragments; returns the next
* free segment index
* @ingroup crypto
*/
static __inline__ unsigned int
nlm_crypto_fill_src_seg(struct nlm_crypto_pkt_param *param, int seg,
    unsigned char *input, unsigned int inlen)
{
        unsigned off = 0, len = 0;
        unsigned int remlen = inlen;

        while (remlen > 0) {
                len = remlen > NLM_CRYPTO_MAX_SEG_LEN ?
                    NLM_CRYPTO_MAX_SEG_LEN : remlen;
                param->segment[seg][0] = nlm_crypto_form_pkt_desc4(len,
                    vtophys(input + off));
                remlen -= len;
                off += len;
                seg++;
        }
        return (seg);
}

/**
* @brief Fill destination segment entries (pkt desc 5), splitting the output
* buffer into NLM_CRYPTO_MAX_SEG_LEN sized fragments; returns the next
* free segment index
* @ingroup crypto
*/
static __inline__ unsigned int
nlm_crypto_fill_dst_seg(struct nlm_crypto_pkt_param *param,
                int seg, unsigned char *output, unsigned int outlen)
{
        unsigned off = 0, len = 0;
        unsigned int remlen = outlen;

        while (remlen > 0) {
                len = remlen > NLM_CRYPTO_MAX_SEG_LEN ?
                    NLM_CRYPTO_MAX_SEG_LEN : remlen;
                param->segment[seg][1] = nlm_crypto_form_pkt_desc5(len, 1, 0,
                    vtophys(output + off));
                remlen -= len;
                off += len;
                seg++;
        }
        return (seg);
}

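/*
 * Illustrative end-to-end sketch (not part of the original header):
 * preparing a complete encrypt+authenticate request (AES-128-CBC with
 * HMAC-SHA1, layout [IV][payload]).  The control and packet descriptors
 * are assumed to be allocated cache-line aligned and sized for the
 * required segments (see nlm_crypto_example_pkt_param_size() above); the
 * resulting two FMN message words would then be sent to a security VC
 * obtained from nlm_qidstart_sec().  All names and the freeback VC are
 * hypothetical; inlen is assumed to be larger than ivlen.
 */
static __inline__ int
nlm_crypto_example_encrypt_auth_request(struct nlm_crypto_pkt_ctrl *ctrl,
    struct nlm_crypto_pkt_param *param, unsigned char *cipherkey,
    unsigned char *hashkey, unsigned char *input, unsigned int inlen,
    unsigned char *output, unsigned char *hashdst, unsigned int ivlen,
    unsigned int fbvc, uint64_t msg[2])
{
        unsigned int nseg;

        /* 1. Algorithm selection, keys and tag length. */
        if (nlm_crypto_fill_pkt_ctrl(ctrl, 1, NLM_HASH_SHA,
            NLM_HASH_MODE_SHA1, NLM_CIPHER_AES128, NLM_CIPHER_MODE_CBC,
            cipherkey, 16, hashkey, 64) != 0)
                return (-1);

        /* 2. Per-packet offsets and lengths (IV leads the buffer). */
        nlm_crypto_fill_cipher_auth_pkt_param(ctrl, param, 1, 1, 0, ivlen,
            ivlen, inlen - ivlen, 0, ivlen, inlen - ivlen, hashdst);

        /* 3. Scatter/gather segments for source and destination. */
        (void)nlm_crypto_fill_src_seg(param, 0, input, inlen);
        nseg = nlm_crypto_fill_dst_seg(param, 0, output, inlen);

        /* 4. FMN message words handed to the SAE. */
        msg[0] = nlm_crypto_form_pkt_fmn_entry0(fbvc, 1, 0,
            ctrl->cipherkeylen, vtophys(ctrl));
        msg[1] = nlm_crypto_form_pkt_fmn_entry1(0, ctrl->hashkeylen,
            nlm_crypto_example_pkt_param_size(nseg), vtophys(param));
        return (0);
}
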
#endif /* _NLM_HAL_CRYPTO_H_ */
