FreeBSD/Linux Kernel Cross Reference
sys/crypto/openssl/i386/vpaes-x86.S


    1 /* $FreeBSD$ */
    2 /* Do not modify. This file is auto-generated from vpaes-x86.pl. */
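      /*
       * Vector-permutation AES (VPAES) for x86/SSSE3, as generated from
       * OpenSSL's vpaes-x86.pl: the S-box and MixColumns steps are carried
       * out with pshufb nibble permutations instead of data-dependent table
       * lookups.  Both the "#ifdef PIC" and "#else" halves below define the
       * same routines; the exported entry points are vpaes_set_encrypt_key,
       * vpaes_set_decrypt_key, vpaes_encrypt, vpaes_decrypt and
       * vpaes_cbc_encrypt.
       */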
    3 #ifdef PIC
    4 .text
    5 .align  64
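      /* Constant area shared by all routines below and addressed through
         %ebp at .L_vpaes_consts+0x30: the nibble-inversion tables and the
         0x0F low-nibble mask used for the S-box, together with the
         permutation masks and transform tables for the rounds and the key
         schedule, followed by the author string. */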
    6 .L_vpaes_consts:
    7 .long   218628480,235210255,168496130,67568393
    8 .long   252381056,17041926,33884169,51187212
    9 .long   252645135,252645135,252645135,252645135
   10 .long   1512730624,3266504856,1377990664,3401244816
   11 .long   830229760,1275146365,2969422977,3447763452
   12 .long   3411033600,2979783055,338359620,2782886510
   13 .long   4209124096,907596821,221174255,1006095553
   14 .long   191964160,3799684038,3164090317,1589111125
   15 .long   182528256,1777043520,2877432650,3265356744
   16 .long   1874708224,3503451415,3305285752,363511674
   17 .long   1606117888,3487855781,1093350906,2384367825
   18 .long   197121,67569157,134941193,202313229
   19 .long   67569157,134941193,202313229,197121
   20 .long   134941193,202313229,197121,67569157
   21 .long   202313229,197121,67569157,134941193
   22 .long   33619971,100992007,168364043,235736079
   23 .long   235736079,33619971,100992007,168364043
   24 .long   168364043,235736079,33619971,100992007
   25 .long   100992007,168364043,235736079,33619971
   26 .long   50462976,117835012,185207048,252579084
   27 .long   252314880,51251460,117574920,184942860
   28 .long   184682752,252054788,50987272,118359308
   29 .long   118099200,185467140,251790600,50727180
   30 .long   2946363062,528716217,1300004225,1881839624
   31 .long   1532713819,1532713819,1532713819,1532713819
   32 .long   3602276352,4288629033,3737020424,4153884961
   33 .long   1354558464,32357713,2958822624,3775749553
   34 .long   1201988352,132424512,1572796698,503232858
   35 .long   2213177600,1597421020,4103937655,675398315
   36 .long   2749646592,4273543773,1511898873,121693092
   37 .long   3040248576,1103263732,2871565598,1608280554
   38 .long   2236667136,2588920351,482954393,64377734
   39 .long   3069987328,291237287,2117370568,3650299247
   40 .long   533321216,3573750986,2572112006,1401264716
   41 .long   1339849704,2721158661,548607111,3445553514
   42 .long   2128193280,3054596040,2183486460,1257083700
   43 .long   655635200,1165381986,3923443150,2344132524
   44 .long   190078720,256924420,290342170,357187870
   45 .long   1610966272,2263057382,4103205268,309794674
   46 .long   2592527872,2233205587,1335446729,3402964816
   47 .long   3973531904,3225098121,3002836325,1918774430
   48 .long   3870401024,2102906079,2284471353,4117666579
   49 .long   617007872,1021508343,366931923,691083277
   50 .long   2528395776,3491914898,2968704004,1613121270
   51 .long   3445188352,3247741094,844474987,4093578302
   52 .long   651481088,1190302358,1689581232,574775300
   53 .long   4289380608,206939853,2555985458,2489840491
   54 .long   2130264064,327674451,3566485037,3349835193
   55 .long   2470714624,316102159,3636825756,3393945945
   56 .byte   86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105
   57 .byte   111,110,32,65,69,83,32,102,111,114,32,120,56,54,47,83
   58 .byte   83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117
   59 .byte   114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105
   60 .byte   118,101,114,115,105,116,121,41,0
   61 .align  64
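      /* _vpaes_preheat: the caller loads %ebp with .L_vpaes_consts+0x30
         minus its own pic_point label, so adding the return address at
         (%esp) turns %ebp into a run-time pointer to the constant area.
         %xmm7 (inversion table) and %xmm6 (0x0F low-nibble mask) are then
         preloaded for the encrypt/decrypt cores. */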
   62 .type   _vpaes_preheat,@function
   63 .align  16
   64 _vpaes_preheat:
   65         addl    (%esp),%ebp
   66         movdqa  -48(%ebp),%xmm7
   67         movdqa  -16(%ebp),%xmm6
   68         ret
   69 .size   _vpaes_preheat,.-_vpaes_preheat
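      /* _vpaes_encrypt_core: encrypt the block in %xmm0 (result left in
         %xmm0).  %edx points at the key schedule and 240(%edx) holds the
         round count, matching the layout of OpenSSL's AES_KEY.  Relies on
         the %ebp/%xmm6/%xmm7 state set up by _vpaes_preheat; clobbers
         %eax, %ebx, %ecx, %edx and %xmm1-%xmm5. */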
   70 .type   _vpaes_encrypt_core,@function
   71 .align  16
   72 _vpaes_encrypt_core:
   73         movl    $16,%ecx
   74         movl    240(%edx),%eax
   75         movdqa  %xmm6,%xmm1
   76         movdqa  (%ebp),%xmm2
   77         pandn   %xmm0,%xmm1
   78         pand    %xmm6,%xmm0
   79         movdqu  (%edx),%xmm5
   80 .byte   102,15,56,0,208
   81         movdqa  16(%ebp),%xmm0
   82         pxor    %xmm5,%xmm2
   83         psrld   $4,%xmm1
   84         addl    $16,%edx
   85 .byte   102,15,56,0,193
   86         leal    192(%ebp),%ebx
   87         pxor    %xmm2,%xmm0
   88         jmp     .L000enc_entry
   89 .align  16
   90 .L001enc_loop:
   91         movdqa  32(%ebp),%xmm4
   92         movdqa  48(%ebp),%xmm0
   93 .byte   102,15,56,0,226
   94 .byte   102,15,56,0,195
   95         pxor    %xmm5,%xmm4
   96         movdqa  64(%ebp),%xmm5
   97         pxor    %xmm4,%xmm0
   98         movdqa  -64(%ebx,%ecx,1),%xmm1
   99 .byte   102,15,56,0,234
  100         movdqa  80(%ebp),%xmm2
  101         movdqa  (%ebx,%ecx,1),%xmm4
  102 .byte   102,15,56,0,211
  103         movdqa  %xmm0,%xmm3
  104         pxor    %xmm5,%xmm2
  105 .byte   102,15,56,0,193
  106         addl    $16,%edx
  107         pxor    %xmm2,%xmm0
  108 .byte   102,15,56,0,220
  109         addl    $16,%ecx
  110         pxor    %xmm0,%xmm3
  111 .byte   102,15,56,0,193
  112         andl    $48,%ecx
  113         subl    $1,%eax
  114         pxor    %xmm3,%xmm0
  115 .L000enc_entry:
  116         movdqa  %xmm6,%xmm1
  117         movdqa  -32(%ebp),%xmm5
  118         pandn   %xmm0,%xmm1
  119         psrld   $4,%xmm1
  120         pand    %xmm6,%xmm0
  121 .byte   102,15,56,0,232
  122         movdqa  %xmm7,%xmm3
  123         pxor    %xmm1,%xmm0
  124 .byte   102,15,56,0,217
  125         movdqa  %xmm7,%xmm4
  126         pxor    %xmm5,%xmm3
  127 .byte   102,15,56,0,224
  128         movdqa  %xmm7,%xmm2
  129         pxor    %xmm5,%xmm4
  130 .byte   102,15,56,0,211
  131         movdqa  %xmm7,%xmm3
  132         pxor    %xmm0,%xmm2
  133 .byte   102,15,56,0,220
  134         movdqu  (%edx),%xmm5
  135         pxor    %xmm1,%xmm3
  136         jnz     .L001enc_loop
  137         movdqa  96(%ebp),%xmm4
  138         movdqa  112(%ebp),%xmm0
  139 .byte   102,15,56,0,226
  140         pxor    %xmm5,%xmm4
  141 .byte   102,15,56,0,195
  142         movdqa  64(%ebx,%ecx,1),%xmm1
  143         pxor    %xmm4,%xmm0
  144 .byte   102,15,56,0,193
  145         ret
  146 .size   _vpaes_encrypt_core,.-_vpaes_encrypt_core
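      /* _vpaes_decrypt_core: decrypt the block in %xmm0 (result left in
         %xmm0), using the same key-schedule/round-count convention as
         _vpaes_encrypt_core and the state set up by _vpaes_preheat. */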
  147 .type   _vpaes_decrypt_core,@function
  148 .align  16
  149 _vpaes_decrypt_core:
  150         leal    608(%ebp),%ebx
  151         movl    240(%edx),%eax
  152         movdqa  %xmm6,%xmm1
  153         movdqa  -64(%ebx),%xmm2
  154         pandn   %xmm0,%xmm1
  155         movl    %eax,%ecx
  156         psrld   $4,%xmm1
  157         movdqu  (%edx),%xmm5
  158         shll    $4,%ecx
  159         pand    %xmm6,%xmm0
  160 .byte   102,15,56,0,208
  161         movdqa  -48(%ebx),%xmm0
  162         xorl    $48,%ecx
  163 .byte   102,15,56,0,193
  164         andl    $48,%ecx
  165         pxor    %xmm5,%xmm2
  166         movdqa  176(%ebp),%xmm5
  167         pxor    %xmm2,%xmm0
  168         addl    $16,%edx
  169         leal    -352(%ebx,%ecx,1),%ecx
  170         jmp     .L002dec_entry
  171 .align  16
  172 .L003dec_loop:
  173         movdqa  -32(%ebx),%xmm4
  174         movdqa  -16(%ebx),%xmm1
  175 .byte   102,15,56,0,226
  176 .byte   102,15,56,0,203
  177         pxor    %xmm4,%xmm0
  178         movdqa  (%ebx),%xmm4
  179         pxor    %xmm1,%xmm0
  180         movdqa  16(%ebx),%xmm1
  181 .byte   102,15,56,0,226
  182 .byte   102,15,56,0,197
  183 .byte   102,15,56,0,203
  184         pxor    %xmm4,%xmm0
  185         movdqa  32(%ebx),%xmm4
  186         pxor    %xmm1,%xmm0
  187         movdqa  48(%ebx),%xmm1
  188 .byte   102,15,56,0,226
  189 .byte   102,15,56,0,197
  190 .byte   102,15,56,0,203
  191         pxor    %xmm4,%xmm0
  192         movdqa  64(%ebx),%xmm4
  193         pxor    %xmm1,%xmm0
  194         movdqa  80(%ebx),%xmm1
  195 .byte   102,15,56,0,226
  196 .byte   102,15,56,0,197
  197 .byte   102,15,56,0,203
  198         pxor    %xmm4,%xmm0
  199         addl    $16,%edx
  200 .byte   102,15,58,15,237,12
  201         pxor    %xmm1,%xmm0
  202         subl    $1,%eax
  203 .L002dec_entry:
  204         movdqa  %xmm6,%xmm1
  205         movdqa  -32(%ebp),%xmm2
  206         pandn   %xmm0,%xmm1
  207         pand    %xmm6,%xmm0
  208         psrld   $4,%xmm1
  209 .byte   102,15,56,0,208
  210         movdqa  %xmm7,%xmm3
  211         pxor    %xmm1,%xmm0
  212 .byte   102,15,56,0,217
  213         movdqa  %xmm7,%xmm4
  214         pxor    %xmm2,%xmm3
  215 .byte   102,15,56,0,224
  216         pxor    %xmm2,%xmm4
  217         movdqa  %xmm7,%xmm2
  218 .byte   102,15,56,0,211
  219         movdqa  %xmm7,%xmm3
  220         pxor    %xmm0,%xmm2
  221 .byte   102,15,56,0,220
  222         movdqu  (%edx),%xmm0
  223         pxor    %xmm1,%xmm3
  224         jnz     .L003dec_loop
  225         movdqa  96(%ebx),%xmm4
  226 .byte   102,15,56,0,226
  227         pxor    %xmm0,%xmm4
  228         movdqa  112(%ebx),%xmm0
  229         movdqa  (%ecx),%xmm2
  230 .byte   102,15,56,0,195
  231         pxor    %xmm4,%xmm0
  232 .byte   102,15,56,0,194
  233         ret
  234 .size   _vpaes_decrypt_core,.-_vpaes_decrypt_core
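      /* _vpaes_schedule_core: expand the user key at (%esi) into the
         schedule written through %edx.  %eax is the key size in bits
         (128/192/256), %edi selects encryption (zero) or decryption
         (non-zero) scheduling, and %ecx seeds the rotating index into the
         ShiftRows constants used by _vpaes_schedule_mangle; all of these
         are set up by the vpaes_set_*_key wrappers below. */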
  235 .type   _vpaes_schedule_core,@function
  236 .align  16
  237 _vpaes_schedule_core:
  238         addl    (%esp),%ebp
  239         movdqu  (%esi),%xmm0
  240         movdqa  320(%ebp),%xmm2
  241         movdqa  %xmm0,%xmm3
  242         leal    (%ebp),%ebx
  243         movdqa  %xmm2,4(%esp)
  244         call    _vpaes_schedule_transform
  245         movdqa  %xmm0,%xmm7
  246         testl   %edi,%edi
  247         jnz     .L004schedule_am_decrypting
  248         movdqu  %xmm0,(%edx)
  249         jmp     .L005schedule_go
  250 .L004schedule_am_decrypting:
  251         movdqa  256(%ebp,%ecx,1),%xmm1
  252 .byte   102,15,56,0,217
  253         movdqu  %xmm3,(%edx)
  254         xorl    $48,%ecx
  255 .L005schedule_go:
  256         cmpl    $192,%eax
  257         ja      .L006schedule_256
  258         je      .L007schedule_192
  259 .L008schedule_128:
  260         movl    $10,%eax
  261 .L009loop_schedule_128:
  262         call    _vpaes_schedule_round
  263         decl    %eax
  264         jz      .L010schedule_mangle_last
  265         call    _vpaes_schedule_mangle
  266         jmp     .L009loop_schedule_128
  267 .align  16
  268 .L007schedule_192:
  269         movdqu  8(%esi),%xmm0
  270         call    _vpaes_schedule_transform
  271         movdqa  %xmm0,%xmm6
  272         pxor    %xmm4,%xmm4
  273         movhlps %xmm4,%xmm6
  274         movl    $4,%eax
  275 .L011loop_schedule_192:
  276         call    _vpaes_schedule_round
  277 .byte   102,15,58,15,198,8
  278         call    _vpaes_schedule_mangle
  279         call    _vpaes_schedule_192_smear
  280         call    _vpaes_schedule_mangle
  281         call    _vpaes_schedule_round
  282         decl    %eax
  283         jz      .L010schedule_mangle_last
  284         call    _vpaes_schedule_mangle
  285         call    _vpaes_schedule_192_smear
  286         jmp     .L011loop_schedule_192
  287 .align  16
  288 .L006schedule_256:
  289         movdqu  16(%esi),%xmm0
  290         call    _vpaes_schedule_transform
  291         movl    $7,%eax
  292 .L012loop_schedule_256:
  293         call    _vpaes_schedule_mangle
  294         movdqa  %xmm0,%xmm6
  295         call    _vpaes_schedule_round
  296         decl    %eax
  297         jz      .L010schedule_mangle_last
  298         call    _vpaes_schedule_mangle
  299         pshufd  $255,%xmm0,%xmm0
  300         movdqa  %xmm7,20(%esp)
  301         movdqa  %xmm6,%xmm7
  302         call    .L_vpaes_schedule_low_round
  303         movdqa  20(%esp),%xmm7
  304         jmp     .L012loop_schedule_256
  305 .align  16
  306 .L010schedule_mangle_last:
  307         leal    384(%ebp),%ebx
  308         testl   %edi,%edi
  309         jnz     .L013schedule_mangle_last_dec
  310         movdqa  256(%ebp,%ecx,1),%xmm1
  311 .byte   102,15,56,0,193
  312         leal    352(%ebp),%ebx
  313         addl    $32,%edx
  314 .L013schedule_mangle_last_dec:
  315         addl    $-16,%edx
  316         pxor    336(%ebp),%xmm0
  317         call    _vpaes_schedule_transform
  318         movdqu  %xmm0,(%edx)
  319         pxor    %xmm0,%xmm0
  320         pxor    %xmm1,%xmm1
  321         pxor    %xmm2,%xmm2
  322         pxor    %xmm3,%xmm3
  323         pxor    %xmm4,%xmm4
  324         pxor    %xmm5,%xmm5
  325         pxor    %xmm6,%xmm6
  326         pxor    %xmm7,%xmm7
  327         ret
  328 .size   _vpaes_schedule_core,.-_vpaes_schedule_core
  329 .type   _vpaes_schedule_192_smear,@function
  330 .align  16
  331 _vpaes_schedule_192_smear:
  332         pshufd  $128,%xmm6,%xmm1
  333         pshufd  $254,%xmm7,%xmm0
  334         pxor    %xmm1,%xmm6
  335         pxor    %xmm1,%xmm1
  336         pxor    %xmm0,%xmm6
  337         movdqa  %xmm6,%xmm0
  338         movhlps %xmm1,%xmm6
  339         ret
  340 .size   _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear
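      /* _vpaes_schedule_round: produce the next 128 bits of key schedule
         in %xmm7 (also returned in %xmm0).  The full entry point rotates
         the round-constant vector kept in the caller's stack slot and xors
         the emitted constant into %xmm7; .L_vpaes_schedule_low_round does
         the nibble-based SubBytes on %xmm0 and the sliding xor of the
         previous round key. */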
  341 .type   _vpaes_schedule_round,@function
  342 .align  16
  343 _vpaes_schedule_round:
  344         movdqa  8(%esp),%xmm2
  345         pxor    %xmm1,%xmm1
  346 .byte   102,15,58,15,202,15
  347 .byte   102,15,58,15,210,15
  348         pxor    %xmm1,%xmm7
  349         pshufd  $255,%xmm0,%xmm0
  350 .byte   102,15,58,15,192,1
  351         movdqa  %xmm2,8(%esp)
  352 .L_vpaes_schedule_low_round:
  353         movdqa  %xmm7,%xmm1
  354         pslldq  $4,%xmm7
  355         pxor    %xmm1,%xmm7
  356         movdqa  %xmm7,%xmm1
  357         pslldq  $8,%xmm7
  358         pxor    %xmm1,%xmm7
  359         pxor    336(%ebp),%xmm7
  360         movdqa  -16(%ebp),%xmm4
  361         movdqa  -48(%ebp),%xmm5
  362         movdqa  %xmm4,%xmm1
  363         pandn   %xmm0,%xmm1
  364         psrld   $4,%xmm1
  365         pand    %xmm4,%xmm0
  366         movdqa  -32(%ebp),%xmm2
  367 .byte   102,15,56,0,208
  368         pxor    %xmm1,%xmm0
  369         movdqa  %xmm5,%xmm3
  370 .byte   102,15,56,0,217
  371         pxor    %xmm2,%xmm3
  372         movdqa  %xmm5,%xmm4
  373 .byte   102,15,56,0,224
  374         pxor    %xmm2,%xmm4
  375         movdqa  %xmm5,%xmm2
  376 .byte   102,15,56,0,211
  377         pxor    %xmm0,%xmm2
  378         movdqa  %xmm5,%xmm3
  379 .byte   102,15,56,0,220
  380         pxor    %xmm1,%xmm3
  381         movdqa  32(%ebp),%xmm4
  382 .byte   102,15,56,0,226
  383         movdqa  48(%ebp),%xmm0
  384 .byte   102,15,56,0,195
  385         pxor    %xmm4,%xmm0
  386         pxor    %xmm7,%xmm0
  387         movdqa  %xmm0,%xmm7
  388         ret
  389 .size   _vpaes_schedule_round,.-_vpaes_schedule_round
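      /* _vpaes_schedule_transform: apply a linear transform to %xmm0 by
         splitting it into low/high nibbles (mask at -16(%ebp)) and
         combining two pshufb lookups into the 16-byte tables at (%ebx)
         and 16(%ebx). */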
  390 .type   _vpaes_schedule_transform,@function
  391 .align  16
  392 _vpaes_schedule_transform:
  393         movdqa  -16(%ebp),%xmm2
  394         movdqa  %xmm2,%xmm1
  395         pandn   %xmm0,%xmm1
  396         psrld   $4,%xmm1
  397         pand    %xmm2,%xmm0
  398         movdqa  (%ebx),%xmm2
  399 .byte   102,15,56,0,208
  400         movdqa  16(%ebx),%xmm0
  401 .byte   102,15,56,0,193
  402         pxor    %xmm2,%xmm0
  403         ret
  404 .size   _vpaes_schedule_transform,.-_vpaes_schedule_transform
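      /* _vpaes_schedule_mangle: derive and store the next output round key
         from the unmangled key in %xmm0 (which is preserved).  %edi selects
         the encrypt or decrypt variant of the transform, the result is
         permuted through the ShiftRows constants indexed by %ecx, and it is
         written at (%edx), which advances by 16 for encryption and steps
         back by 16 for decryption. */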
  405 .type   _vpaes_schedule_mangle,@function
  406 .align  16
  407 _vpaes_schedule_mangle:
  408         movdqa  %xmm0,%xmm4
  409         movdqa  128(%ebp),%xmm5
  410         testl   %edi,%edi
  411         jnz     .L014schedule_mangle_dec
  412         addl    $16,%edx
  413         pxor    336(%ebp),%xmm4
  414 .byte   102,15,56,0,229
  415         movdqa  %xmm4,%xmm3
  416 .byte   102,15,56,0,229
  417         pxor    %xmm4,%xmm3
  418 .byte   102,15,56,0,229
  419         pxor    %xmm4,%xmm3
  420         jmp     .L015schedule_mangle_both
  421 .align  16
  422 .L014schedule_mangle_dec:
  423         movdqa  -16(%ebp),%xmm2
  424         leal    416(%ebp),%esi
  425         movdqa  %xmm2,%xmm1
  426         pandn   %xmm4,%xmm1
  427         psrld   $4,%xmm1
  428         pand    %xmm2,%xmm4
  429         movdqa  (%esi),%xmm2
  430 .byte   102,15,56,0,212
  431         movdqa  16(%esi),%xmm3
  432 .byte   102,15,56,0,217
  433         pxor    %xmm2,%xmm3
  434 .byte   102,15,56,0,221
  435         movdqa  32(%esi),%xmm2
  436 .byte   102,15,56,0,212
  437         pxor    %xmm3,%xmm2
  438         movdqa  48(%esi),%xmm3
  439 .byte   102,15,56,0,217
  440         pxor    %xmm2,%xmm3
  441 .byte   102,15,56,0,221
  442         movdqa  64(%esi),%xmm2
  443 .byte   102,15,56,0,212
  444         pxor    %xmm3,%xmm2
  445         movdqa  80(%esi),%xmm3
  446 .byte   102,15,56,0,217
  447         pxor    %xmm2,%xmm3
  448 .byte   102,15,56,0,221
  449         movdqa  96(%esi),%xmm2
  450 .byte   102,15,56,0,212
  451         pxor    %xmm3,%xmm2
  452         movdqa  112(%esi),%xmm3
  453 .byte   102,15,56,0,217
  454         pxor    %xmm2,%xmm3
  455         addl    $-16,%edx
  456 .L015schedule_mangle_both:
  457         movdqa  256(%ebp,%ecx,1),%xmm1
  458 .byte   102,15,56,0,217
  459         addl    $-16,%ecx
  460         andl    $48,%ecx
  461         movdqu  %xmm3,(%edx)
  462         ret
  463 .size   _vpaes_schedule_mangle,.-_vpaes_schedule_mangle
  464 .globl  vpaes_set_encrypt_key
  465 .type   vpaes_set_encrypt_key,@function
  466 .align  16
  467 vpaes_set_encrypt_key:
  468 .L_vpaes_set_encrypt_key_begin:
  469         pushl   %ebp
  470         pushl   %ebx
  471         pushl   %esi
  472         pushl   %edi
  473         movl    20(%esp),%esi
  474         leal    -56(%esp),%ebx
  475         movl    24(%esp),%eax
  476         andl    $-16,%ebx
  477         movl    28(%esp),%edx
  478         xchgl   %esp,%ebx
  479         movl    %ebx,48(%esp)
  480         movl    %eax,%ebx
  481         shrl    $5,%ebx
  482         addl    $5,%ebx
  483         movl    %ebx,240(%edx)
  484         movl    $48,%ecx
  485         movl    $0,%edi
  486         leal    .L_vpaes_consts+0x30-.L016pic_point,%ebp
  487         call    _vpaes_schedule_core
  488 .L016pic_point:
  489         movl    48(%esp),%esp
  490         xorl    %eax,%eax
  491         popl    %edi
  492         popl    %esi
  493         popl    %ebx
  494         popl    %ebp
  495         ret
  496 .size   vpaes_set_encrypt_key,.-.L_vpaes_set_encrypt_key_begin
  497 .globl  vpaes_set_decrypt_key
  498 .type   vpaes_set_decrypt_key,@function
  499 .align  16
  500 vpaes_set_decrypt_key:
  501 .L_vpaes_set_decrypt_key_begin:
  502         pushl   %ebp
  503         pushl   %ebx
  504         pushl   %esi
  505         pushl   %edi
  506         movl    20(%esp),%esi
  507         leal    -56(%esp),%ebx
  508         movl    24(%esp),%eax
  509         andl    $-16,%ebx
  510         movl    28(%esp),%edx
  511         xchgl   %esp,%ebx
  512         movl    %ebx,48(%esp)
  513         movl    %eax,%ebx
  514         shrl    $5,%ebx
  515         addl    $5,%ebx
  516         movl    %ebx,240(%edx)
  517         shll    $4,%ebx
  518         leal    16(%edx,%ebx,1),%edx
  519         movl    $1,%edi
  520         movl    %eax,%ecx
  521         shrl    $1,%ecx
  522         andl    $32,%ecx
  523         xorl    $32,%ecx
  524         leal    .L_vpaes_consts+0x30-.L017pic_point,%ebp
  525         call    _vpaes_schedule_core
  526 .L017pic_point:
  527         movl    48(%esp),%esp
  528         xorl    %eax,%eax
  529         popl    %edi
  530         popl    %esi
  531         popl    %ebx
  532         popl    %ebp
  533         ret
  534 .size   vpaes_set_decrypt_key,.-.L_vpaes_set_decrypt_key_begin
  535 .globl  vpaes_encrypt
  536 .type   vpaes_encrypt,@function
  537 .align  16
  538 vpaes_encrypt:
  539 .L_vpaes_encrypt_begin:
  540         pushl   %ebp
  541         pushl   %ebx
  542         pushl   %esi
  543         pushl   %edi
  544         leal    .L_vpaes_consts+0x30-.L018pic_point,%ebp
  545         call    _vpaes_preheat
  546 .L018pic_point:
  547         movl    20(%esp),%esi
  548         leal    -56(%esp),%ebx
  549         movl    24(%esp),%edi
  550         andl    $-16,%ebx
  551         movl    28(%esp),%edx
  552         xchgl   %esp,%ebx
  553         movl    %ebx,48(%esp)
  554         movdqu  (%esi),%xmm0
  555         call    _vpaes_encrypt_core
  556         movdqu  %xmm0,(%edi)
  557         movl    48(%esp),%esp
  558         popl    %edi
  559         popl    %esi
  560         popl    %ebx
  561         popl    %ebp
  562         ret
  563 .size   vpaes_encrypt,.-.L_vpaes_encrypt_begin
  564 .globl  vpaes_decrypt
  565 .type   vpaes_decrypt,@function
  566 .align  16
  567 vpaes_decrypt:
  568 .L_vpaes_decrypt_begin:
  569         pushl   %ebp
  570         pushl   %ebx
  571         pushl   %esi
  572         pushl   %edi
  573         leal    .L_vpaes_consts+0x30-.L019pic_point,%ebp
  574         call    _vpaes_preheat
  575 .L019pic_point:
  576         movl    20(%esp),%esi
  577         leal    -56(%esp),%ebx
  578         movl    24(%esp),%edi
  579         andl    $-16,%ebx
  580         movl    28(%esp),%edx
  581         xchgl   %esp,%ebx
  582         movl    %ebx,48(%esp)
  583         movdqu  (%esi),%xmm0
  584         call    _vpaes_decrypt_core
  585         movdqu  %xmm0,(%edi)
  586         movl    48(%esp),%esp
  587         popl    %edi
  588         popl    %esi
  589         popl    %ebx
  590         popl    %ebp
  591         ret
  592 .size   vpaes_decrypt,.-.L_vpaes_decrypt_begin
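      /* vpaes_cbc_encrypt: cdecl entry point taking (in, out, length, key,
         ivec, enc) on the stack.  Lengths shorter than 16 are rejected,
         only whole 16-byte blocks are processed, the IV at ivec is updated
         in place, and enc selects the encryption (non-zero) or decryption
         (zero) loop. */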
  593 .globl  vpaes_cbc_encrypt
  594 .type   vpaes_cbc_encrypt,@function
  595 .align  16
  596 vpaes_cbc_encrypt:
  597 .L_vpaes_cbc_encrypt_begin:
  598         pushl   %ebp
  599         pushl   %ebx
  600         pushl   %esi
  601         pushl   %edi
  602         movl    20(%esp),%esi
  603         movl    24(%esp),%edi
  604         movl    28(%esp),%eax
  605         movl    32(%esp),%edx
  606         subl    $16,%eax
  607         jc      .L020cbc_abort
  608         leal    -56(%esp),%ebx
  609         movl    36(%esp),%ebp
  610         andl    $-16,%ebx
  611         movl    40(%esp),%ecx
  612         xchgl   %esp,%ebx
  613         movdqu  (%ebp),%xmm1
  614         subl    %esi,%edi
  615         movl    %ebx,48(%esp)
  616         movl    %edi,(%esp)
  617         movl    %edx,4(%esp)
  618         movl    %ebp,8(%esp)
  619         movl    %eax,%edi
  620         leal    .L_vpaes_consts+0x30-.L021pic_point,%ebp
  621         call    _vpaes_preheat
  622 .L021pic_point:
  623         cmpl    $0,%ecx
  624         je      .L022cbc_dec_loop
  625         jmp     .L023cbc_enc_loop
  626 .align  16
  627 .L023cbc_enc_loop:
  628         movdqu  (%esi),%xmm0
  629         pxor    %xmm1,%xmm0
  630         call    _vpaes_encrypt_core
  631         movl    (%esp),%ebx
  632         movl    4(%esp),%edx
  633         movdqa  %xmm0,%xmm1
  634         movdqu  %xmm0,(%ebx,%esi,1)
  635         leal    16(%esi),%esi
  636         subl    $16,%edi
  637         jnc     .L023cbc_enc_loop
  638         jmp     .L024cbc_done
  639 .align  16
  640 .L022cbc_dec_loop:
  641         movdqu  (%esi),%xmm0
  642         movdqa  %xmm1,16(%esp)
  643         movdqa  %xmm0,32(%esp)
  644         call    _vpaes_decrypt_core
  645         movl    (%esp),%ebx
  646         movl    4(%esp),%edx
  647         pxor    16(%esp),%xmm0
  648         movdqa  32(%esp),%xmm1
  649         movdqu  %xmm0,(%ebx,%esi,1)
  650         leal    16(%esi),%esi
  651         subl    $16,%edi
  652         jnc     .L022cbc_dec_loop
  653 .L024cbc_done:
  654         movl    8(%esp),%ebx
  655         movl    48(%esp),%esp
  656         movdqu  %xmm1,(%ebx)
  657 .L020cbc_abort:
  658         popl    %edi
  659         popl    %esi
  660         popl    %ebx
  661         popl    %ebp
  662         ret
  663 .size   vpaes_cbc_encrypt,.-.L_vpaes_cbc_encrypt_begin
  664 #else
  665 .text
  666 .align  64
  667 .L_vpaes_consts:
  668 .long   218628480,235210255,168496130,67568393
  669 .long   252381056,17041926,33884169,51187212
  670 .long   252645135,252645135,252645135,252645135
  671 .long   1512730624,3266504856,1377990664,3401244816
  672 .long   830229760,1275146365,2969422977,3447763452
  673 .long   3411033600,2979783055,338359620,2782886510
  674 .long   4209124096,907596821,221174255,1006095553
  675 .long   191964160,3799684038,3164090317,1589111125
  676 .long   182528256,1777043520,2877432650,3265356744
  677 .long   1874708224,3503451415,3305285752,363511674
  678 .long   1606117888,3487855781,1093350906,2384367825
  679 .long   197121,67569157,134941193,202313229
  680 .long   67569157,134941193,202313229,197121
  681 .long   134941193,202313229,197121,67569157
  682 .long   202313229,197121,67569157,134941193
  683 .long   33619971,100992007,168364043,235736079
  684 .long   235736079,33619971,100992007,168364043
  685 .long   168364043,235736079,33619971,100992007
  686 .long   100992007,168364043,235736079,33619971
  687 .long   50462976,117835012,185207048,252579084
  688 .long   252314880,51251460,117574920,184942860
  689 .long   184682752,252054788,50987272,118359308
  690 .long   118099200,185467140,251790600,50727180
  691 .long   2946363062,528716217,1300004225,1881839624
  692 .long   1532713819,1532713819,1532713819,1532713819
  693 .long   3602276352,4288629033,3737020424,4153884961
  694 .long   1354558464,32357713,2958822624,3775749553
  695 .long   1201988352,132424512,1572796698,503232858
  696 .long   2213177600,1597421020,4103937655,675398315
  697 .long   2749646592,4273543773,1511898873,121693092
  698 .long   3040248576,1103263732,2871565598,1608280554
  699 .long   2236667136,2588920351,482954393,64377734
  700 .long   3069987328,291237287,2117370568,3650299247
  701 .long   533321216,3573750986,2572112006,1401264716
  702 .long   1339849704,2721158661,548607111,3445553514
  703 .long   2128193280,3054596040,2183486460,1257083700
  704 .long   655635200,1165381986,3923443150,2344132524
  705 .long   190078720,256924420,290342170,357187870
  706 .long   1610966272,2263057382,4103205268,309794674
  707 .long   2592527872,2233205587,1335446729,3402964816
  708 .long   3973531904,3225098121,3002836325,1918774430
  709 .long   3870401024,2102906079,2284471353,4117666579
  710 .long   617007872,1021508343,366931923,691083277
  711 .long   2528395776,3491914898,2968704004,1613121270
  712 .long   3445188352,3247741094,844474987,4093578302
  713 .long   651481088,1190302358,1689581232,574775300
  714 .long   4289380608,206939853,2555985458,2489840491
  715 .long   2130264064,327674451,3566485037,3349835193
  716 .long   2470714624,316102159,3636825756,3393945945
  717 .byte   86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105
  718 .byte   111,110,32,65,69,83,32,102,111,114,32,120,56,54,47,83
  719 .byte   83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117
  720 .byte   114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105
  721 .byte   118,101,114,115,105,116,121,41,0
  722 .align  64
  723 .type   _vpaes_preheat,@function
  724 .align  16
  725 _vpaes_preheat:
  726         addl    (%esp),%ebp
  727         movdqa  -48(%ebp),%xmm7
  728         movdqa  -16(%ebp),%xmm6
  729         ret
  730 .size   _vpaes_preheat,.-_vpaes_preheat
  731 .type   _vpaes_encrypt_core,@function
  732 .align  16
  733 _vpaes_encrypt_core:
  734         movl    $16,%ecx
  735         movl    240(%edx),%eax
  736         movdqa  %xmm6,%xmm1
  737         movdqa  (%ebp),%xmm2
  738         pandn   %xmm0,%xmm1
  739         pand    %xmm6,%xmm0
  740         movdqu  (%edx),%xmm5
  741 .byte   102,15,56,0,208
  742         movdqa  16(%ebp),%xmm0
  743         pxor    %xmm5,%xmm2
  744         psrld   $4,%xmm1
  745         addl    $16,%edx
  746 .byte   102,15,56,0,193
  747         leal    192(%ebp),%ebx
  748         pxor    %xmm2,%xmm0
  749         jmp     .L000enc_entry
  750 .align  16
  751 .L001enc_loop:
  752         movdqa  32(%ebp),%xmm4
  753         movdqa  48(%ebp),%xmm0
  754 .byte   102,15,56,0,226
  755 .byte   102,15,56,0,195
  756         pxor    %xmm5,%xmm4
  757         movdqa  64(%ebp),%xmm5
  758         pxor    %xmm4,%xmm0
  759         movdqa  -64(%ebx,%ecx,1),%xmm1
  760 .byte   102,15,56,0,234
  761         movdqa  80(%ebp),%xmm2
  762         movdqa  (%ebx,%ecx,1),%xmm4
  763 .byte   102,15,56,0,211
  764         movdqa  %xmm0,%xmm3
  765         pxor    %xmm5,%xmm2
  766 .byte   102,15,56,0,193
  767         addl    $16,%edx
  768         pxor    %xmm2,%xmm0
  769 .byte   102,15,56,0,220
  770         addl    $16,%ecx
  771         pxor    %xmm0,%xmm3
  772 .byte   102,15,56,0,193
  773         andl    $48,%ecx
  774         subl    $1,%eax
  775         pxor    %xmm3,%xmm0
  776 .L000enc_entry:
  777         movdqa  %xmm6,%xmm1
  778         movdqa  -32(%ebp),%xmm5
  779         pandn   %xmm0,%xmm1
  780         psrld   $4,%xmm1
  781         pand    %xmm6,%xmm0
  782 .byte   102,15,56,0,232
  783         movdqa  %xmm7,%xmm3
  784         pxor    %xmm1,%xmm0
  785 .byte   102,15,56,0,217
  786         movdqa  %xmm7,%xmm4
  787         pxor    %xmm5,%xmm3
  788 .byte   102,15,56,0,224
  789         movdqa  %xmm7,%xmm2
  790         pxor    %xmm5,%xmm4
  791 .byte   102,15,56,0,211
  792         movdqa  %xmm7,%xmm3
  793         pxor    %xmm0,%xmm2
  794 .byte   102,15,56,0,220
  795         movdqu  (%edx),%xmm5
  796         pxor    %xmm1,%xmm3
  797         jnz     .L001enc_loop
  798         movdqa  96(%ebp),%xmm4
  799         movdqa  112(%ebp),%xmm0
  800 .byte   102,15,56,0,226
  801         pxor    %xmm5,%xmm4
  802 .byte   102,15,56,0,195
  803         movdqa  64(%ebx,%ecx,1),%xmm1
  804         pxor    %xmm4,%xmm0
  805 .byte   102,15,56,0,193
  806         ret
  807 .size   _vpaes_encrypt_core,.-_vpaes_encrypt_core
  808 .type   _vpaes_decrypt_core,@function
  809 .align  16
  810 _vpaes_decrypt_core:
  811         leal    608(%ebp),%ebx
  812         movl    240(%edx),%eax
  813         movdqa  %xmm6,%xmm1
  814         movdqa  -64(%ebx),%xmm2
  815         pandn   %xmm0,%xmm1
  816         movl    %eax,%ecx
  817         psrld   $4,%xmm1
  818         movdqu  (%edx),%xmm5
  819         shll    $4,%ecx
  820         pand    %xmm6,%xmm0
  821 .byte   102,15,56,0,208
  822         movdqa  -48(%ebx),%xmm0
  823         xorl    $48,%ecx
  824 .byte   102,15,56,0,193
  825         andl    $48,%ecx
  826         pxor    %xmm5,%xmm2
  827         movdqa  176(%ebp),%xmm5
  828         pxor    %xmm2,%xmm0
  829         addl    $16,%edx
  830         leal    -352(%ebx,%ecx,1),%ecx
  831         jmp     .L002dec_entry
  832 .align  16
  833 .L003dec_loop:
  834         movdqa  -32(%ebx),%xmm4
  835         movdqa  -16(%ebx),%xmm1
  836 .byte   102,15,56,0,226
  837 .byte   102,15,56,0,203
  838         pxor    %xmm4,%xmm0
  839         movdqa  (%ebx),%xmm4
  840         pxor    %xmm1,%xmm0
  841         movdqa  16(%ebx),%xmm1
  842 .byte   102,15,56,0,226
  843 .byte   102,15,56,0,197
  844 .byte   102,15,56,0,203
  845         pxor    %xmm4,%xmm0
  846         movdqa  32(%ebx),%xmm4
  847         pxor    %xmm1,%xmm0
  848         movdqa  48(%ebx),%xmm1
  849 .byte   102,15,56,0,226
  850 .byte   102,15,56,0,197
  851 .byte   102,15,56,0,203
  852         pxor    %xmm4,%xmm0
  853         movdqa  64(%ebx),%xmm4
  854         pxor    %xmm1,%xmm0
  855         movdqa  80(%ebx),%xmm1
  856 .byte   102,15,56,0,226
  857 .byte   102,15,56,0,197
  858 .byte   102,15,56,0,203
  859         pxor    %xmm4,%xmm0
  860         addl    $16,%edx
  861 .byte   102,15,58,15,237,12
  862         pxor    %xmm1,%xmm0
  863         subl    $1,%eax
  864 .L002dec_entry:
  865         movdqa  %xmm6,%xmm1
  866         movdqa  -32(%ebp),%xmm2
  867         pandn   %xmm0,%xmm1
  868         pand    %xmm6,%xmm0
  869         psrld   $4,%xmm1
  870 .byte   102,15,56,0,208
  871         movdqa  %xmm7,%xmm3
  872         pxor    %xmm1,%xmm0
  873 .byte   102,15,56,0,217
  874         movdqa  %xmm7,%xmm4
  875         pxor    %xmm2,%xmm3
  876 .byte   102,15,56,0,224
  877         pxor    %xmm2,%xmm4
  878         movdqa  %xmm7,%xmm2
  879 .byte   102,15,56,0,211
  880         movdqa  %xmm7,%xmm3
  881         pxor    %xmm0,%xmm2
  882 .byte   102,15,56,0,220
  883         movdqu  (%edx),%xmm0
  884         pxor    %xmm1,%xmm3
  885         jnz     .L003dec_loop
  886         movdqa  96(%ebx),%xmm4
  887 .byte   102,15,56,0,226
  888         pxor    %xmm0,%xmm4
  889         movdqa  112(%ebx),%xmm0
  890         movdqa  (%ecx),%xmm2
  891 .byte   102,15,56,0,195
  892         pxor    %xmm4,%xmm0
  893 .byte   102,15,56,0,194
  894         ret
  895 .size   _vpaes_decrypt_core,.-_vpaes_decrypt_core
  896 .type   _vpaes_schedule_core,@function
  897 .align  16
  898 _vpaes_schedule_core:
  899         addl    (%esp),%ebp
  900         movdqu  (%esi),%xmm0
  901         movdqa  320(%ebp),%xmm2
  902         movdqa  %xmm0,%xmm3
  903         leal    (%ebp),%ebx
  904         movdqa  %xmm2,4(%esp)
  905         call    _vpaes_schedule_transform
  906         movdqa  %xmm0,%xmm7
  907         testl   %edi,%edi
  908         jnz     .L004schedule_am_decrypting
  909         movdqu  %xmm0,(%edx)
  910         jmp     .L005schedule_go
  911 .L004schedule_am_decrypting:
  912         movdqa  256(%ebp,%ecx,1),%xmm1
  913 .byte   102,15,56,0,217
  914         movdqu  %xmm3,(%edx)
  915         xorl    $48,%ecx
  916 .L005schedule_go:
  917         cmpl    $192,%eax
  918         ja      .L006schedule_256
  919         je      .L007schedule_192
  920 .L008schedule_128:
  921         movl    $10,%eax
  922 .L009loop_schedule_128:
  923         call    _vpaes_schedule_round
  924         decl    %eax
  925         jz      .L010schedule_mangle_last
  926         call    _vpaes_schedule_mangle
  927         jmp     .L009loop_schedule_128
  928 .align  16
  929 .L007schedule_192:
  930         movdqu  8(%esi),%xmm0
  931         call    _vpaes_schedule_transform
  932         movdqa  %xmm0,%xmm6
  933         pxor    %xmm4,%xmm4
  934         movhlps %xmm4,%xmm6
  935         movl    $4,%eax
  936 .L011loop_schedule_192:
  937         call    _vpaes_schedule_round
  938 .byte   102,15,58,15,198,8
  939         call    _vpaes_schedule_mangle
  940         call    _vpaes_schedule_192_smear
  941         call    _vpaes_schedule_mangle
  942         call    _vpaes_schedule_round
  943         decl    %eax
  944         jz      .L010schedule_mangle_last
  945         call    _vpaes_schedule_mangle
  946         call    _vpaes_schedule_192_smear
  947         jmp     .L011loop_schedule_192
  948 .align  16
  949 .L006schedule_256:
  950         movdqu  16(%esi),%xmm0
  951         call    _vpaes_schedule_transform
  952         movl    $7,%eax
  953 .L012loop_schedule_256:
  954         call    _vpaes_schedule_mangle
  955         movdqa  %xmm0,%xmm6
  956         call    _vpaes_schedule_round
  957         decl    %eax
  958         jz      .L010schedule_mangle_last
  959         call    _vpaes_schedule_mangle
  960         pshufd  $255,%xmm0,%xmm0
  961         movdqa  %xmm7,20(%esp)
  962         movdqa  %xmm6,%xmm7
  963         call    .L_vpaes_schedule_low_round
  964         movdqa  20(%esp),%xmm7
  965         jmp     .L012loop_schedule_256
  966 .align  16
  967 .L010schedule_mangle_last:
  968         leal    384(%ebp),%ebx
  969         testl   %edi,%edi
  970         jnz     .L013schedule_mangle_last_dec
  971         movdqa  256(%ebp,%ecx,1),%xmm1
  972 .byte   102,15,56,0,193
  973         leal    352(%ebp),%ebx
  974         addl    $32,%edx
  975 .L013schedule_mangle_last_dec:
  976         addl    $-16,%edx
  977         pxor    336(%ebp),%xmm0
  978         call    _vpaes_schedule_transform
  979         movdqu  %xmm0,(%edx)
  980         pxor    %xmm0,%xmm0
  981         pxor    %xmm1,%xmm1
  982         pxor    %xmm2,%xmm2
  983         pxor    %xmm3,%xmm3
  984         pxor    %xmm4,%xmm4
  985         pxor    %xmm5,%xmm5
  986         pxor    %xmm6,%xmm6
  987         pxor    %xmm7,%xmm7
  988         ret
  989 .size   _vpaes_schedule_core,.-_vpaes_schedule_core
  990 .type   _vpaes_schedule_192_smear,@function
  991 .align  16
  992 _vpaes_schedule_192_smear:
  993         pshufd  $128,%xmm6,%xmm1
  994         pshufd  $254,%xmm7,%xmm0
  995         pxor    %xmm1,%xmm6
  996         pxor    %xmm1,%xmm1
  997         pxor    %xmm0,%xmm6
  998         movdqa  %xmm6,%xmm0
  999         movhlps %xmm1,%xmm6
 1000         ret
 1001 .size   _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear
 1002 .type   _vpaes_schedule_round,@function
 1003 .align  16
 1004 _vpaes_schedule_round:
 1005         movdqa  8(%esp),%xmm2
 1006         pxor    %xmm1,%xmm1
 1007 .byte   102,15,58,15,202,15
 1008 .byte   102,15,58,15,210,15
 1009         pxor    %xmm1,%xmm7
 1010         pshufd  $255,%xmm0,%xmm0
 1011 .byte   102,15,58,15,192,1
 1012         movdqa  %xmm2,8(%esp)
 1013 .L_vpaes_schedule_low_round:
 1014         movdqa  %xmm7,%xmm1
 1015         pslldq  $4,%xmm7
 1016         pxor    %xmm1,%xmm7
 1017         movdqa  %xmm7,%xmm1
 1018         pslldq  $8,%xmm7
 1019         pxor    %xmm1,%xmm7
 1020         pxor    336(%ebp),%xmm7
 1021         movdqa  -16(%ebp),%xmm4
 1022         movdqa  -48(%ebp),%xmm5
 1023         movdqa  %xmm4,%xmm1
 1024         pandn   %xmm0,%xmm1
 1025         psrld   $4,%xmm1
 1026         pand    %xmm4,%xmm0
 1027         movdqa  -32(%ebp),%xmm2
 1028 .byte   102,15,56,0,208
 1029         pxor    %xmm1,%xmm0
 1030         movdqa  %xmm5,%xmm3
 1031 .byte   102,15,56,0,217
 1032         pxor    %xmm2,%xmm3
 1033         movdqa  %xmm5,%xmm4
 1034 .byte   102,15,56,0,224
 1035         pxor    %xmm2,%xmm4
 1036         movdqa  %xmm5,%xmm2
 1037 .byte   102,15,56,0,211
 1038         pxor    %xmm0,%xmm2
 1039         movdqa  %xmm5,%xmm3
 1040 .byte   102,15,56,0,220
 1041         pxor    %xmm1,%xmm3
 1042         movdqa  32(%ebp),%xmm4
 1043 .byte   102,15,56,0,226
 1044         movdqa  48(%ebp),%xmm0
 1045 .byte   102,15,56,0,195
 1046         pxor    %xmm4,%xmm0
 1047         pxor    %xmm7,%xmm0
 1048         movdqa  %xmm0,%xmm7
 1049         ret
 1050 .size   _vpaes_schedule_round,.-_vpaes_schedule_round
 1051 .type   _vpaes_schedule_transform,@function
 1052 .align  16
 1053 _vpaes_schedule_transform:
 1054         movdqa  -16(%ebp),%xmm2
 1055         movdqa  %xmm2,%xmm1
 1056         pandn   %xmm0,%xmm1
 1057         psrld   $4,%xmm1
 1058         pand    %xmm2,%xmm0
 1059         movdqa  (%ebx),%xmm2
 1060 .byte   102,15,56,0,208
 1061         movdqa  16(%ebx),%xmm0
 1062 .byte   102,15,56,0,193
 1063         pxor    %xmm2,%xmm0
 1064         ret
 1065 .size   _vpaes_schedule_transform,.-_vpaes_schedule_transform
 1066 .type   _vpaes_schedule_mangle,@function
 1067 .align  16
 1068 _vpaes_schedule_mangle:
 1069         movdqa  %xmm0,%xmm4
 1070         movdqa  128(%ebp),%xmm5
 1071         testl   %edi,%edi
 1072         jnz     .L014schedule_mangle_dec
 1073         addl    $16,%edx
 1074         pxor    336(%ebp),%xmm4
 1075 .byte   102,15,56,0,229
 1076         movdqa  %xmm4,%xmm3
 1077 .byte   102,15,56,0,229
 1078         pxor    %xmm4,%xmm3
 1079 .byte   102,15,56,0,229
 1080         pxor    %xmm4,%xmm3
 1081         jmp     .L015schedule_mangle_both
 1082 .align  16
 1083 .L014schedule_mangle_dec:
 1084         movdqa  -16(%ebp),%xmm2
 1085         leal    416(%ebp),%esi
 1086         movdqa  %xmm2,%xmm1
 1087         pandn   %xmm4,%xmm1
 1088         psrld   $4,%xmm1
 1089         pand    %xmm2,%xmm4
 1090         movdqa  (%esi),%xmm2
 1091 .byte   102,15,56,0,212
 1092         movdqa  16(%esi),%xmm3
 1093 .byte   102,15,56,0,217
 1094         pxor    %xmm2,%xmm3
 1095 .byte   102,15,56,0,221
 1096         movdqa  32(%esi),%xmm2
 1097 .byte   102,15,56,0,212
 1098         pxor    %xmm3,%xmm2
 1099         movdqa  48(%esi),%xmm3
 1100 .byte   102,15,56,0,217
 1101         pxor    %xmm2,%xmm3
 1102 .byte   102,15,56,0,221
 1103         movdqa  64(%esi),%xmm2
 1104 .byte   102,15,56,0,212
 1105         pxor    %xmm3,%xmm2
 1106         movdqa  80(%esi),%xmm3
 1107 .byte   102,15,56,0,217
 1108         pxor    %xmm2,%xmm3
 1109 .byte   102,15,56,0,221
 1110         movdqa  96(%esi),%xmm2
 1111 .byte   102,15,56,0,212
 1112         pxor    %xmm3,%xmm2
 1113         movdqa  112(%esi),%xmm3
 1114 .byte   102,15,56,0,217
 1115         pxor    %xmm2,%xmm3
 1116         addl    $-16,%edx
 1117 .L015schedule_mangle_both:
 1118         movdqa  256(%ebp,%ecx,1),%xmm1
 1119 .byte   102,15,56,0,217
 1120         addl    $-16,%ecx
 1121         andl    $48,%ecx
 1122         movdqu  %xmm3,(%edx)
 1123         ret
 1124 .size   _vpaes_schedule_mangle,.-_vpaes_schedule_mangle
 1125 .globl  vpaes_set_encrypt_key
 1126 .type   vpaes_set_encrypt_key,@function
 1127 .align  16
 1128 vpaes_set_encrypt_key:
 1129 .L_vpaes_set_encrypt_key_begin:
 1130         pushl   %ebp
 1131         pushl   %ebx
 1132         pushl   %esi
 1133         pushl   %edi
 1134         movl    20(%esp),%esi
 1135         leal    -56(%esp),%ebx
 1136         movl    24(%esp),%eax
 1137         andl    $-16,%ebx
 1138         movl    28(%esp),%edx
 1139         xchgl   %esp,%ebx
 1140         movl    %ebx,48(%esp)
 1141         movl    %eax,%ebx
 1142         shrl    $5,%ebx
 1143         addl    $5,%ebx
 1144         movl    %ebx,240(%edx)
 1145         movl    $48,%ecx
 1146         movl    $0,%edi
 1147         leal    .L_vpaes_consts+0x30-.L016pic_point,%ebp
 1148         call    _vpaes_schedule_core
 1149 .L016pic_point:
 1150         movl    48(%esp),%esp
 1151         xorl    %eax,%eax
 1152         popl    %edi
 1153         popl    %esi
 1154         popl    %ebx
 1155         popl    %ebp
 1156         ret
 1157 .size   vpaes_set_encrypt_key,.-.L_vpaes_set_encrypt_key_begin
 1158 .globl  vpaes_set_decrypt_key
 1159 .type   vpaes_set_decrypt_key,@function
 1160 .align  16
 1161 vpaes_set_decrypt_key:
 1162 .L_vpaes_set_decrypt_key_begin:
 1163         pushl   %ebp
 1164         pushl   %ebx
 1165         pushl   %esi
 1166         pushl   %edi
 1167         movl    20(%esp),%esi
 1168         leal    -56(%esp),%ebx
 1169         movl    24(%esp),%eax
 1170         andl    $-16,%ebx
 1171         movl    28(%esp),%edx
 1172         xchgl   %esp,%ebx
 1173         movl    %ebx,48(%esp)
 1174         movl    %eax,%ebx
 1175         shrl    $5,%ebx
 1176         addl    $5,%ebx
 1177         movl    %ebx,240(%edx)
 1178         shll    $4,%ebx
 1179         leal    16(%edx,%ebx,1),%edx
 1180         movl    $1,%edi
 1181         movl    %eax,%ecx
 1182         shrl    $1,%ecx
 1183         andl    $32,%ecx
 1184         xorl    $32,%ecx
 1185         leal    .L_vpaes_consts+0x30-.L017pic_point,%ebp
 1186         call    _vpaes_schedule_core
 1187 .L017pic_point:
 1188         movl    48(%esp),%esp
 1189         xorl    %eax,%eax
 1190         popl    %edi
 1191         popl    %esi
 1192         popl    %ebx
 1193         popl    %ebp
 1194         ret
 1195 .size   vpaes_set_decrypt_key,.-.L_vpaes_set_decrypt_key_begin
 1196 .globl  vpaes_encrypt
 1197 .type   vpaes_encrypt,@function
 1198 .align  16
 1199 vpaes_encrypt:
 1200 .L_vpaes_encrypt_begin:
 1201         pushl   %ebp
 1202         pushl   %ebx
 1203         pushl   %esi
 1204         pushl   %edi
 1205         leal    .L_vpaes_consts+0x30-.L018pic_point,%ebp
 1206         call    _vpaes_preheat
 1207 .L018pic_point:
 1208         movl    20(%esp),%esi
 1209         leal    -56(%esp),%ebx
 1210         movl    24(%esp),%edi
 1211         andl    $-16,%ebx
 1212         movl    28(%esp),%edx
 1213         xchgl   %esp,%ebx
 1214         movl    %ebx,48(%esp)
 1215         movdqu  (%esi),%xmm0
 1216         call    _vpaes_encrypt_core
 1217         movdqu  %xmm0,(%edi)
 1218         movl    48(%esp),%esp
 1219         popl    %edi
 1220         popl    %esi
 1221         popl    %ebx
 1222         popl    %ebp
 1223         ret
 1224 .size   vpaes_encrypt,.-.L_vpaes_encrypt_begin
 1225 .globl  vpaes_decrypt
 1226 .type   vpaes_decrypt,@function
 1227 .align  16
 1228 vpaes_decrypt:
 1229 .L_vpaes_decrypt_begin:
 1230         pushl   %ebp
 1231         pushl   %ebx
 1232         pushl   %esi
 1233         pushl   %edi
 1234         leal    .L_vpaes_consts+0x30-.L019pic_point,%ebp
 1235         call    _vpaes_preheat
 1236 .L019pic_point:
 1237         movl    20(%esp),%esi
 1238         leal    -56(%esp),%ebx
 1239         movl    24(%esp),%edi
 1240         andl    $-16,%ebx
 1241         movl    28(%esp),%edx
 1242         xchgl   %esp,%ebx
 1243         movl    %ebx,48(%esp)
 1244         movdqu  (%esi),%xmm0
 1245         call    _vpaes_decrypt_core
 1246         movdqu  %xmm0,(%edi)
 1247         movl    48(%esp),%esp
 1248         popl    %edi
 1249         popl    %esi
 1250         popl    %ebx
 1251         popl    %ebp
 1252         ret
 1253 .size   vpaes_decrypt,.-.L_vpaes_decrypt_begin
 1254 .globl  vpaes_cbc_encrypt
 1255 .type   vpaes_cbc_encrypt,@function
 1256 .align  16
 1257 vpaes_cbc_encrypt:
 1258 .L_vpaes_cbc_encrypt_begin:
 1259         pushl   %ebp
 1260         pushl   %ebx
 1261         pushl   %esi
 1262         pushl   %edi
 1263         movl    20(%esp),%esi
 1264         movl    24(%esp),%edi
 1265         movl    28(%esp),%eax
 1266         movl    32(%esp),%edx
 1267         subl    $16,%eax
 1268         jc      .L020cbc_abort
 1269         leal    -56(%esp),%ebx
 1270         movl    36(%esp),%ebp
 1271         andl    $-16,%ebx
 1272         movl    40(%esp),%ecx
 1273         xchgl   %esp,%ebx
 1274         movdqu  (%ebp),%xmm1
 1275         subl    %esi,%edi
 1276         movl    %ebx,48(%esp)
 1277         movl    %edi,(%esp)
 1278         movl    %edx,4(%esp)
 1279         movl    %ebp,8(%esp)
 1280         movl    %eax,%edi
 1281         leal    .L_vpaes_consts+0x30-.L021pic_point,%ebp
 1282         call    _vpaes_preheat
 1283 .L021pic_point:
 1284         cmpl    $0,%ecx
 1285         je      .L022cbc_dec_loop
 1286         jmp     .L023cbc_enc_loop
 1287 .align  16
 1288 .L023cbc_enc_loop:
 1289         movdqu  (%esi),%xmm0
 1290         pxor    %xmm1,%xmm0
 1291         call    _vpaes_encrypt_core
 1292         movl    (%esp),%ebx
 1293         movl    4(%esp),%edx
 1294         movdqa  %xmm0,%xmm1
 1295         movdqu  %xmm0,(%ebx,%esi,1)
 1296         leal    16(%esi),%esi
 1297         subl    $16,%edi
 1298         jnc     .L023cbc_enc_loop
 1299         jmp     .L024cbc_done
 1300 .align  16
 1301 .L022cbc_dec_loop:
 1302         movdqu  (%esi),%xmm0
 1303         movdqa  %xmm1,16(%esp)
 1304         movdqa  %xmm0,32(%esp)
 1305         call    _vpaes_decrypt_core
 1306         movl    (%esp),%ebx
 1307         movl    4(%esp),%edx
 1308         pxor    16(%esp),%xmm0
 1309         movdqa  32(%esp),%xmm1
 1310         movdqu  %xmm0,(%ebx,%esi,1)
 1311         leal    16(%esi),%esi
 1312         subl    $16,%edi
 1313         jnc     .L022cbc_dec_loop
 1314 .L024cbc_done:
 1315         movl    8(%esp),%ebx
 1316         movl    48(%esp),%esp
 1317         movdqu  %xmm1,(%ebx)
 1318 .L020cbc_abort:
 1319         popl    %edi
 1320         popl    %esi
 1321         popl    %ebx
 1322         popl    %ebp
 1323         ret
 1324 .size   vpaes_cbc_encrypt,.-.L_vpaes_cbc_encrypt_begin
 1325 #endif
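
The five .globl entry points above follow the i386 cdecl convention, reading their arguments from the stack in the order shown by the movl 20(%esp), 24(%esp), ... loads. The sketch below shows how a C caller would typically drive them; the prototypes are assumptions modelled on the conventional OpenSSL vpaes declarations rather than anything declared in this file, and AES_KEY is OpenSSL's usual key-schedule structure, whose round count the cores read from offset 240.

        #include <stddef.h>
        #include <openssl/aes.h>        /* assumed: provides the AES_KEY structure */

        /* Assumed prototypes for the entry points exported above. */
        int  vpaes_set_encrypt_key(const unsigned char *userKey, int bits, AES_KEY *key);
        int  vpaes_set_decrypt_key(const unsigned char *userKey, int bits, AES_KEY *key);
        void vpaes_encrypt(const unsigned char *in, unsigned char *out, const AES_KEY *key);
        void vpaes_decrypt(const unsigned char *in, unsigned char *out, const AES_KEY *key);
        void vpaes_cbc_encrypt(const unsigned char *in, unsigned char *out, size_t length,
                               const AES_KEY *key, unsigned char *ivec, int enc);

        /* Hypothetical helper: AES-128-CBC encrypt a 64-byte buffer.  The IV
         * is updated in place (see the movdqu %xmm1,(%ebx) at .L024cbc_done),
         * and only whole 16-byte blocks are processed. */
        static void cbc_encrypt_example(const unsigned char key[16], unsigned char iv[16],
                                        const unsigned char in[64], unsigned char out[64])
        {
                AES_KEY ks;

                vpaes_set_encrypt_key(key, 128, &ks);       /* returns 0 (xorl %eax,%eax) */
                vpaes_cbc_encrypt(in, out, 64, &ks, iv, 1); /* enc != 0: encryption loop  */
        }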
