[llvm-commits] [parallel] CVS: llvm/runtime/zlib/contrib/inflate86/inffas86.c inffast.S
Misha Brukman
brukman at cs.uiuc.edu
Mon Mar 1 18:17:31 PST 2004
Changes in directory llvm/runtime/zlib/contrib/inflate86:
inffas86.c added (r1.1.2.1)
inffast.S added (r1.1.2.1)
---
Log message:
Merge from trunk
---
Diffs of the changes: (+2160 -0)
Index: llvm/runtime/zlib/contrib/inflate86/inffas86.c
diff -c /dev/null llvm/runtime/zlib/contrib/inflate86/inffas86.c:1.1.2.1
*** /dev/null Mon Mar 1 17:58:58 2004
--- llvm/runtime/zlib/contrib/inflate86/inffas86.c Mon Mar 1 17:58:48 2004
***************
*** 0 ****
--- 1,783 ----
+ /* inffas86.c is a hand tuned assembler version of
+ *
+ * inffast.c -- fast decoding
+ * Copyright (C) 1995-2003 Mark Adler
+ * For conditions of distribution and use, see copyright notice in zlib.h
+ *
+ * Copyright (C) 2003 Chris Anderson <christop@charm.net>
+ * Please use the copyright conditions above.
+ *
+ * Mar-13-2003 -- Most of this is derived from inffast.S which is derived from
+ * the gcc -S output of zlib-1.2.0/inffast.c. Zlib-1.2.0 is in beta release at
+ * the moment. I have successfully compiled and tested this code with gcc2.96,
+ * gcc3.2, icc5.0, msvc6.0. It is very close to the speed of inffast.S
+ * compiled with gcc -DNO_MMX, but inffast.S is still faster on the P3 with MMX
+ * enabled. I will attempt to merge the MMX code into this version. Newer
+ * versions of this and inffast.S can be found at
+ * http://www.eetbeetee.com/zlib/ and http://www.charm.net/~christop/zlib/
+ */
+
+ #include "zutil.h"
+ #include "inftrees.h"
+ #include "inflate.h"
+ #include "inffast.h"
+
+ /* Mark Adler's comments from inffast.c: */
+
+ /*
+ Decode literal, length, and distance codes and write out the resulting
+ literal and match bytes until either not enough input or output is
+ available, an end-of-block is encountered, or a data error is encountered.
+ When large enough input and output buffers are supplied to inflate(), for
+ example, a 16K input buffer and a 64K output buffer, more than 95% of the
+ inflate execution time is spent in this routine.
+
+ Entry assumptions:
+
+ state->mode == LEN
+ strm->avail_in >= 6
+ strm->avail_out >= 258
+ start >= strm->avail_out
+ state->bits < 8
+
+ On return, state->mode is one of:
+
+ LEN -- ran out of enough output space or enough available input
+ TYPE -- reached end of block code, inflate() to interpret next block
+ BAD -- error in block data
+
+ Notes:
+
+ - The maximum input bits used by a length/distance pair is 15 bits for the
+ length code, 5 bits for the length extra, 15 bits for the distance code,
+ and 13 bits for the distance extra. This totals 48 bits, or six bytes.
+ Therefore if strm->avail_in >= 6, then there is enough input to avoid
+ checking for available input while decoding.
+
+ - The maximum bytes that a single length/distance pair can output is 258
+ bytes, which is the maximum length that can be coded. inflate_fast()
+ requires strm->avail_out >= 258 for each loop to avoid checking for
+ output space.
+ */
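Both versions below keep the decoder state in a bit accumulator: hold carries up to 32 bits of input, bits counts how many of them are valid, and every table lookup first tops the accumulator up from in and then masks off the low bits it needs. A minimal stand-alone C sketch of that pattern, for orientation only (the buffer contents and the 9-bit peek are arbitrary and hypothetical, not zlib code):

    #include <stdio.h>

    int main(void)
    {
        const unsigned char in[] = { 0xA5, 0x3C, 0x7E, 0x01 };  /* arbitrary input */
        const unsigned char *p = in;
        unsigned long hold = 0;   /* bit accumulator */
        unsigned bits = 0;        /* number of valid bits in hold */
        unsigned idx;

        /* refill: make sure at least 15 bits are available, as the decoder does */
        while (bits < 15) {
            hold += (unsigned long)(*p++) << bits;
            bits += 8;
        }

        /* peek the low 9 bits (a typical first-level table index)... */
        idx = (unsigned)(hold & ((1U << 9) - 1));
        /* ...then consume them */
        hold >>= 9;
        bits -= 9;

        printf("index = %u, bits left = %u\n", idx, bits);
        return 0;
    }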
+ void inflate_fast(strm, start)
+ z_streamp strm;
+ unsigned start; /* inflate()'s starting value for strm->avail_out */
+ {
+ struct inflate_state FAR *state;
+ struct inffast_ar {
+ void *esp; /* esp save */
+ unsigned char FAR *in; /* local strm->next_in */
+ unsigned char FAR *last; /* while in < last, enough input available */
+ unsigned char FAR *out; /* local strm->next_out */
+ unsigned char FAR *beg; /* inflate()'s initial strm->next_out */
+ unsigned char FAR *end; /* while out < end, enough space available */
+ unsigned wsize; /* window size or zero if not using window */
+ unsigned write; /* window write index */
+ unsigned char FAR *window; /* allocated sliding window, if wsize != 0 */
+ unsigned long hold; /* local strm->hold */
+ unsigned bits; /* local strm->bits */
+ code const FAR *lcode; /* local strm->lencode */
+ code const FAR *dcode; /* local strm->distcode */
+ unsigned lmask; /* mask for first level of length codes */
+ unsigned dmask; /* mask for first level of distance codes */
+ unsigned len; /* match length, unused bytes */
+ unsigned dist; /* match distance */
+ unsigned status; /* this is set when state changes */
+ } ar;
+
+ /* copy state to local variables */
+ state = (struct inflate_state FAR *)strm->state;
+ ar.in = strm->next_in;
+ ar.last = ar.in + (strm->avail_in - 5);
+ ar.out = strm->next_out;
+ ar.beg = ar.out - (start - strm->avail_out);
+ ar.end = ar.out + (strm->avail_out - 257);
+ ar.wsize = state->wsize;
+ ar.write = state->write;
+ ar.window = state->window;
+ ar.hold = state->hold;
+ ar.bits = state->bits;
+ ar.lcode = state->lencode;
+ ar.dcode = state->distcode;
+ ar.lmask = (1U << state->lenbits) - 1;
+ ar.dmask = (1U << state->distbits) - 1;
+
+ /* decode literals and length/distances until end-of-block or not enough
+ input data or output space */
+
+ /* align in on 2 byte boundary */
+ if (((unsigned long)(void *)ar.in & 0x1) != 0) {
+ ar.hold += (unsigned long)*ar.in++ << ar.bits;
+ ar.bits += 8;
+ }
+
+ #if defined( __GNUC__ ) || defined( __ICC )
+ __asm__ __volatile__ (
+ " leal %0, %%eax\n"
+ " pushf\n"
+ " pushl %%ebp\n"
+ " movl %%esp, (%%eax)\n"
+ " movl %%eax, %%esp\n"
+ " movl 4(%%esp), %%esi\n" /* esi = in */
+ " movl 12(%%esp), %%edi\n" /* edi = out */
+ " movl 36(%%esp), %%edx\n" /* edx = hold */
+ " movl 40(%%esp), %%ebx\n" /* ebx = bits */
+ " movl 44(%%esp), %%ebp\n" /* ebp = lcode */
+
+ " cld\n"
+ " jmp .L_do_loop\n"
+
+ ".L_while_test:\n"
+ " cmpl %%edi, 20(%%esp)\n"
+ " jbe .L_break_loop\n"
+ " cmpl %%esi, 8(%%esp)\n"
+ " jbe .L_break_loop\n"
+
+ ".L_do_loop:\n"
+ " cmpb $15, %%bl\n"
+ " ja .L_get_length_code\n" /* if (15 < bits) */
+
+ " xorl %%eax, %%eax\n"
+ " lodsw\n" /* al = *(ushort *)in++ */
+ " movb %%bl, %%cl\n" /* cl = bits, needs it for shifting */
+ " addb $16, %%bl\n" /* bits += 16 */
+ " shll %%cl, %%eax\n"
+ " orl %%eax, %%edx\n" /* hold |= *((ushort *)in)++ << bits */
+
+ ".L_get_length_code:\n"
+ " movl 52(%%esp), %%eax\n" /* eax = lmask */
+ " andl %%edx, %%eax\n" /* eax &= hold */
+ " movl (%%ebp,%%eax,4), %%eax\n" /* eax = lcode[hold & lmask] */
+
+ ".L_dolen:\n"
+ " movb %%ah, %%cl\n" /* cl = this.bits */
+ " subb %%ah, %%bl\n" /* bits -= this.bits */
+ " shrl %%cl, %%edx\n" /* hold >>= this.bits */
+
+ " testb %%al, %%al\n"
+ " jnz .L_test_for_length_base\n" /* if (op != 0) 45.7% */
+
+ " shrl $16, %%eax\n" /* output this.val char */
+ " stosb\n"
+ " jmp .L_while_test\n"
+
+ ".L_test_for_length_base:\n"
+ " movl %%eax, %%ecx\n" /* len = this */
+ " shrl $16, %%ecx\n" /* len = this.val */
+ " movl %%ecx, 60(%%esp)\n" /* len = this */
+ " movb %%al, %%cl\n"
+
+ " testb $16, %%al\n"
+ " jz .L_test_for_second_level_length\n" /* if ((op & 16) == 0) 8% */
+ " andb $15, %%cl\n" /* op &= 15 */
+ " jz .L_decode_distance\n" /* if (!op) */
+ " cmpb %%cl, %%bl\n"
+ " jae .L_add_bits_to_len\n" /* if (op <= bits) */
+
+ " movb %%cl, %%ch\n" /* stash op in ch, freeing cl */
+ " xorl %%eax, %%eax\n"
+ " lodsw\n" /* al = *(ushort *)in++ */
+ " movb %%bl, %%cl\n" /* cl = bits, needs it for shifting */
+ " addb $16, %%bl\n" /* bits += 16 */
+ " shll %%cl, %%eax\n"
+ " orl %%eax, %%edx\n" /* hold |= *((ushort *)in)++ << bits */
+ " movb %%ch, %%cl\n" /* move op back to ecx */
+
+ ".L_add_bits_to_len:\n"
+ " movl $1, %%eax\n"
+ " shll %%cl, %%eax\n"
+ " decl %%eax\n"
+ " subb %%cl, %%bl\n"
+ " andl %%edx, %%eax\n" /* eax &= hold */
+ " shrl %%cl, %%edx\n"
+ " addl %%eax, 60(%%esp)\n" /* len += hold & mask[op] */
+
+ ".L_decode_distance:\n"
+ " cmpb $15, %%bl\n"
+ " ja .L_get_distance_code\n" /* if (15 < bits) */
+
+ " xorl %%eax, %%eax\n"
+ " lodsw\n" /* al = *(ushort *)in++ */
+ " movb %%bl, %%cl\n" /* cl = bits, needs it for shifting */
+ " addb $16, %%bl\n" /* bits += 16 */
+ " shll %%cl, %%eax\n"
+ " orl %%eax, %%edx\n" /* hold |= *((ushort *)in)++ << bits */
+
+ ".L_get_distance_code:\n"
+ " movl 56(%%esp), %%eax\n" /* eax = dmask */
+ " movl 48(%%esp), %%ecx\n" /* ecx = dcode */
+ " andl %%edx, %%eax\n" /* eax &= hold */
+ " movl (%%ecx,%%eax,4), %%eax\n"/* eax = dcode[hold & dmask] */
+
+ ".L_dodist:\n"
+ " movl %%eax, %%ebp\n" /* dist = this */
+ " shrl $16, %%ebp\n" /* dist = this.val */
+ " movb %%ah, %%cl\n"
+ " subb %%ah, %%bl\n" /* bits -= this.bits */
+ " shrl %%cl, %%edx\n" /* hold >>= this.bits */
+ " movb %%al, %%cl\n" /* cl = this.op */
+
+ " testb $16, %%al\n" /* if ((op & 16) == 0) */
+ " jz .L_test_for_second_level_dist\n"
+ " andb $15, %%cl\n" /* op &= 15 */
+ " jz .L_check_dist_one\n"
+ " cmpb %%cl, %%bl\n"
+ " jae .L_add_bits_to_dist\n" /* if (op <= bits) 97.6% */
+
+ " movb %%cl, %%ch\n" /* stash op in ch, freeing cl */
+ " xorl %%eax, %%eax\n"
+ " lodsw\n" /* al = *(ushort *)in++ */
+ " movb %%bl, %%cl\n" /* cl = bits, needs it for shifting */
+ " addb $16, %%bl\n" /* bits += 16 */
+ " shll %%cl, %%eax\n"
+ " orl %%eax, %%edx\n" /* hold |= *((ushort *)in)++ << bits */
+ " movb %%ch, %%cl\n" /* move op back to ecx */
+
+ ".L_add_bits_to_dist:\n"
+ " movl $1, %%eax\n"
+ " shll %%cl, %%eax\n"
+ " decl %%eax\n" /* (1 << op) - 1 */
+ " subb %%cl, %%bl\n"
+ " andl %%edx, %%eax\n" /* eax &= hold */
+ " shrl %%cl, %%edx\n"
+ " addl %%eax, %%ebp\n" /* dist += hold & ((1 << op) - 1) */
+
+ ".L_check_window:\n"
+ " movl %%esi, 4(%%esp)\n" /* save in so from can use its reg */
+ " movl %%edi, %%eax\n"
+ " subl 16(%%esp), %%eax\n" /* nbytes = out - beg */
+
+ " cmpl %%ebp, %%eax\n"
+ " jb .L_clip_window\n" /* if (dist > nbytes) 4.2% */
+
+ " movl 60(%%esp), %%ecx\n"
+ " movl %%edi, %%esi\n"
+ " subl %%ebp, %%esi\n" /* from = out - dist */
+
+ " subl $3, %%ecx\n" /* copy from to out */
+ " movb (%%esi), %%al\n"
+ " movb %%al, (%%edi)\n"
+ " movb 1(%%esi), %%al\n"
+ " movb 2(%%esi), %%ah\n"
+ " addl $3, %%esi\n"
+ " movb %%al, 1(%%edi)\n"
+ " movb %%ah, 2(%%edi)\n"
+ " addl $3, %%edi\n"
+ " rep movsb\n"
+
+ " movl 4(%%esp), %%esi\n" /* move in back to %esi, toss from */
+ " movl 44(%%esp), %%ebp\n" /* ebp = lcode */
+ " jmp .L_while_test\n"
+
+ ".L_check_dist_one:\n"
+ " cmpl $1, %%ebp\n" /* if dist == 1, it's a memset */
+ " jne .L_check_window\n"
+ " cmpl %%edi, 16(%%esp)\n"
+ " je .L_check_window\n"
+
+ " decl %%edi\n"
+ " movl 60(%%esp), %%ecx\n"
+ " movb (%%edi), %%al\n"
+ " subl $3, %%ecx\n"
+
+ " movb %%al, 1(%%edi)\n" /* memset out with from[-1] */
+ " movb %%al, 2(%%edi)\n"
+ " movb %%al, 3(%%edi)\n"
+ " addl $4, %%edi\n"
+ " rep stosb\n"
+ " movl 44(%%esp), %%ebp\n" /* ebp = lcode */
+ " jmp .L_while_test\n"
+
+ ".L_test_for_second_level_length:\n"
+ " testb $64, %%al\n"
+ " jnz .L_test_for_end_of_block\n" /* if ((op & 64) != 0) */
+
+ " movl $1, %%eax\n"
+ " shll %%cl, %%eax\n"
+ " decl %%eax\n"
+ " andl %%edx, %%eax\n" /* eax &= hold */
+ " addl 60(%%esp), %%eax\n" /* eax += this.val */
+ " movl (%%ebp,%%eax,4), %%eax\n" /* eax = lcode[val+(hold&mask[op])]*/
+ " jmp .L_dolen\n"
+
+ ".L_test_for_second_level_dist:\n"
+ " testb $64, %%al\n"
+ " jnz .L_invalid_distance_code\n" /* if ((op & 64) != 0) */
+
+ " movl $1, %%eax\n"
+ " shll %%cl, %%eax\n"
+ " decl %%eax\n"
+ " andl %%edx, %%eax\n" /* eax &= hold */
+ " addl %%ebp, %%eax\n" /* eax += this.val */
+ " movl 48(%%esp), %%ecx\n" /* ecx = dcode */
+ " movl (%%ecx,%%eax,4), %%eax\n" /* eax = dcode[val+(hold&mask[op])]*/
+ " jmp .L_dodist\n"
+
+ ".L_clip_window:\n"
+ " movl %%eax, %%ecx\n"
+ " movl 24(%%esp), %%eax\n" /* prepare for dist compare */
+ " negl %%ecx\n" /* nbytes = -nbytes */
+ " movl 32(%%esp), %%esi\n" /* from = window */
+
+ " cmpl %%ebp, %%eax\n"
+ " jb .L_invalid_distance_too_far\n" /* if (dist > wsize) */
+
+ " addl %%ebp, %%ecx\n" /* nbytes = dist - nbytes */
+ " cmpl $0, 28(%%esp)\n"
+ " jne .L_wrap_around_window\n" /* if (write != 0) */
+
+ " subl %%ecx, %%eax\n"
+ " addl %%eax, %%esi\n" /* from += wsize - nbytes */
+
+ " movl 60(%%esp), %%eax\n"
+ " cmpl %%ecx, %%eax\n"
+ " jbe .L_do_copy1\n" /* if (nbytes >= len) */
+
+ " subl %%ecx, %%eax\n" /* len -= nbytes */
+ " rep movsb\n"
+ " movl %%edi, %%esi\n"
+ " subl %%ebp, %%esi\n" /* from = out - dist */
+ " jmp .L_do_copy1\n"
+
+ " cmpl %%ecx, %%eax\n"
+ " jbe .L_do_copy1\n" /* if (nbytes >= len) */
+
+ " subl %%ecx, %%eax\n" /* len -= nbytes */
+ " rep movsb\n"
+ " movl %%edi, %%esi\n"
+ " subl %%ebp, %%esi\n" /* from = out - dist */
+ " jmp .L_do_copy1\n"
+
+ ".L_wrap_around_window:\n"
+ " movl 28(%%esp), %%eax\n"
+ " cmpl %%eax, %%ecx\n"
+ " jbe .L_contiguous_in_window\n" /* if (write >= nbytes) */
+
+ " addl 24(%%esp), %%esi\n"
+ " addl %%eax, %%esi\n"
+ " subl %%ecx, %%esi\n" /* from += wsize + write - nbytes */
+ " subl %%eax, %%ecx\n" /* nbytes -= write */
+
+ " movl 60(%%esp), %%eax\n"
+ " cmpl %%ecx, %%eax\n"
+ " jbe .L_do_copy1\n" /* if (nbytes >= len) */
+
+ " subl %%ecx, %%eax\n" /* len -= nbytes */
+ " rep movsb\n"
+ " movl 32(%%esp), %%esi\n" /* from = window */
+ " movl 28(%%esp), %%ecx\n" /* nbytes = write */
+ " cmpl %%ecx, %%eax\n"
+ " jbe .L_do_copy1\n" /* if (nbytes >= len) */
+
+ " subl %%ecx, %%eax\n" /* len -= nbytes */
+ " rep movsb\n"
+ " movl %%edi, %%esi\n"
+ " subl %%ebp, %%esi\n" /* from = out - dist */
+ " jmp .L_do_copy1\n"
+
+ ".L_contiguous_in_window:\n"
+ " addl %%eax, %%esi\n"
+ " subl %%ecx, %%esi\n" /* from += write - nbytes */
+
+ " movl 60(%%esp), %%eax\n"
+ " cmpl %%ecx, %%eax\n"
+ " jbe .L_do_copy1\n" /* if (nbytes >= len) */
+
+ " subl %%ecx, %%eax\n" /* len -= nbytes */
+ " rep movsb\n"
+ " movl %%edi, %%esi\n"
+ " subl %%ebp, %%esi\n" /* from = out - dist */
+
+ ".L_do_copy1:\n"
+ " movl %%eax, %%ecx\n"
+ " rep movsb\n"
+
+ " movl 4(%%esp), %%esi\n" /* move in back to %esi, toss from */
+ " movl 44(%%esp), %%ebp\n" /* ebp = lcode */
+ " jmp .L_while_test\n"
+
+ ".L_test_for_end_of_block:\n"
+ " testb $32, %%al\n"
+ " jz .L_invalid_literal_length_code\n"
+ " movl $1, 68(%%esp)\n"
+ " jmp .L_break_loop_with_status\n"
+
+ ".L_invalid_literal_length_code:\n"
+ " movl $2, 68(%%esp)\n"
+ " jmp .L_break_loop_with_status\n"
+
+ ".L_invalid_distance_code:\n"
+ " movl $3, 68(%%esp)\n"
+ " jmp .L_break_loop_with_status\n"
+
+ ".L_invalid_distance_too_far:\n"
+ " movl 4(%%esp), %%esi\n"
+ " movl $4, 68(%%esp)\n"
+ " jmp .L_break_loop_with_status\n"
+
+ ".L_break_loop:\n"
+ " movl $0, 68(%%esp)\n"
+
+ ".L_break_loop_with_status:\n"
+ /* put in, out, bits, and hold back into ar and pop esp */
+ " movl %%esi, 4(%%esp)\n"
+ " movl %%edi, 12(%%esp)\n"
+ " movl %%ebx, 40(%%esp)\n"
+ " movl %%edx, 36(%%esp)\n"
+ " movl (%%esp), %%esp\n"
+ " popl %%ebp\n"
+ " popf\n"
+ :
+ : "m" (ar)
+ : "memory", "%eax", "%ebx", "%ecx", "%edx", "%esi", "%edi"
+ );
+ #elif defined( _MSC_VER )
+ __asm {
+ lea eax, ar
+ pushfd
+ push ebp
+ mov [eax], esp
+ mov esp, eax
+ mov esi, [esp+4] /* esi = in */
+ mov edi, [esp+12] /* edi = out */
+ mov edx, [esp+36] /* edx = hold */
+ mov ebx, [esp+40] /* ebx = bits */
+ mov ebp, [esp+44] /* ebp = lcode */
+
+ cld
+ jmp L_do_loop
+
+ L_while_test:
+ cmp [esp+20], edi
+ jbe L_break_loop
+ cmp [esp+8], esi
+ jbe L_break_loop
+
+ L_do_loop:
+ cmp bl, 15
+ ja L_get_length_code /* if (15 < bits) */
+
+ xor eax, eax
+ lodsw /* al = *(ushort *)in++ */
+ mov cl, bl /* cl = bits, needs it for shifting */
+ add bl, 16 /* bits += 16 */
+ shl eax, cl
+ or edx, eax /* hold |= *((ushort *)in)++ << bits */
+
+ L_get_length_code:
+ mov eax, [esp+52] /* eax = lmask */
+ and eax, edx /* eax &= hold */
+ mov eax, [ebp+eax*4] /* eax = lcode[hold & lmask] */
+
+ L_dolen:
+ mov cl, ah /* cl = this.bits */
+ sub bl, ah /* bits -= this.bits */
+ shr edx, cl /* hold >>= this.bits */
+
+ test al, al
+ jnz L_test_for_length_base /* if (op != 0) 45.7% */
+
+ shr eax, 16 /* output this.val char */
+ stosb
+ jmp L_while_test
+
+ L_test_for_length_base:
+ mov ecx, eax /* len = this */
+ shr ecx, 16 /* len = this.val */
+ mov [esp+60], ecx /* len = this */
+ mov cl, al
+
+ test al, 16
+ jz L_test_for_second_level_length /* if ((op & 16) == 0) 8% */
+ and cl, 15 /* op &= 15 */
+ jz L_decode_distance /* if (!op) */
+ cmp bl, cl
+ jae L_add_bits_to_len /* if (op <= bits) */
+
+ mov ch, cl /* stash op in ch, freeing cl */
+ xor eax, eax
+ lodsw /* al = *(ushort *)in++ */
+ mov cl, bl /* cl = bits, needs it for shifting */
+ add bl, 16 /* bits += 16 */
+ shl eax, cl
+ or edx, eax /* hold |= *((ushort *)in)++ << bits */
+ mov cl, ch /* move op back to ecx */
+
+ L_add_bits_to_len:
+ mov eax, 1
+ shl eax, cl
+ dec eax
+ sub bl, cl
+ and eax, edx /* eax &= hold */
+ shr edx, cl
+ add [esp+60], eax /* len += hold & mask[op] */
+
+ L_decode_distance:
+ cmp bl, 15
+ ja L_get_distance_code /* if (15 < bits) */
+
+ xor eax, eax
+ lodsw /* al = *(ushort *)in++ */
+ mov cl, bl /* cl = bits, needs it for shifting */
+ add bl, 16 /* bits += 16 */
+ shl eax, cl
+ or edx, eax /* hold |= *((ushort *)in)++ << bits */
+
+ L_get_distance_code:
+ mov eax, [esp+56] /* eax = dmask */
+ mov ecx, [esp+48] /* ecx = dcode */
+ and eax, edx /* eax &= hold */
+ mov eax, [ecx+eax*4]/* eax = dcode[hold & dmask] */
+
+ L_dodist:
+ mov ebp, eax /* dist = this */
+ shr ebp, 16 /* dist = this.val */
+ mov cl, ah
+ sub bl, ah /* bits -= this.bits */
+ shr edx, cl /* hold >>= this.bits */
+ mov cl, al /* cl = this.op */
+
+ test al, 16 /* if ((op & 16) == 0) */
+ jz L_test_for_second_level_dist
+ and cl, 15 /* op &= 15 */
+ jz L_check_dist_one
+ cmp bl, cl
+ jae L_add_bits_to_dist /* if (op <= bits) 97.6% */
+
+ mov ch, cl /* stash op in ch, freeing cl */
+ xor eax, eax
+ lodsw /* al = *(ushort *)in++ */
+ mov cl, bl /* cl = bits, needs it for shifting */
+ add bl, 16 /* bits += 16 */
+ shl eax, cl
+ or edx, eax /* hold |= *((ushort *)in)++ << bits */
+ mov cl, ch /* move op back to ecx */
+
+ L_add_bits_to_dist:
+ mov eax, 1
+ shl eax, cl
+ dec eax /* (1 << op) - 1 */
+ sub bl, cl
+ and eax, edx /* eax &= hold */
+ shr edx, cl
+ add ebp, eax /* dist += hold & ((1 << op) - 1) */
+
+ L_check_window:
+ mov [esp+4], esi /* save in so from can use its reg */
+ mov eax, edi
+ sub eax, [esp+16] /* nbytes = out - beg */
+
+ cmp eax, ebp
+ jb L_clip_window /* if (dist > nbytes) 4.2% */
+
+ mov ecx, [esp+60]
+ mov esi, edi
+ sub esi, ebp /* from = out - dist */
+
+ sub ecx, 3 /* copy from to out */
+ mov al, [esi]
+ mov [edi], al
+ mov al, [esi+1]
+ mov ah, [esi+2]
+ add esi, 3
+ mov [edi+1], al
+ mov [edi+2], ah
+ add edi, 3
+ rep movsb
+
+ mov esi, [esp+4] /* move in back to %esi, toss from */
+ mov ebp, [esp+44] /* ebp = lcode */
+ jmp L_while_test
+
+ L_check_dist_one:
+ cmp ebp, 1 /* if dist == 1, it's a memset */
+ jne L_check_window
+ cmp [esp+16], edi
+ je L_check_window
+
+ dec edi
+ mov ecx, [esp+60]
+ mov al, [edi]
+ sub ecx, 3
+
+ mov [edi+1], al /* memset out with from[-1] */
+ mov [edi+2], al
+ mov [edi+3], al
+ add edi, 4
+ rep stosb
+ mov ebp, [esp+44] /* ebp = lcode */
+ jmp L_while_test
+
+ L_test_for_second_level_length:
+ test al, 64
+ jnz L_test_for_end_of_block /* if ((op & 64) != 0) */
+
+ mov eax, 1
+ shl eax, cl
+ dec eax
+ and eax, edx /* eax &= hold */
+ add eax, [esp+60] /* eax += this.val */
+ mov eax, [ebp+eax*4] /* eax = lcode[val+(hold&mask[op])]*/
+ jmp L_dolen
+
+ L_test_for_second_level_dist:
+ test al, 64
+ jnz L_invalid_distance_code /* if ((op & 64) != 0) */
+
+ mov eax, 1
+ shl eax, cl
+ dec eax
+ and eax, edx /* eax &= hold */
+ add eax, ebp /* eax += this.val */
+ mov ecx, [esp+48] /* ecx = dcode */
+ mov eax, [ecx+eax*4] /* eax = dcode[val+(hold&mask[op])]*/
+ jmp L_dodist
+
+ L_clip_window:
+ mov ecx, eax
+ mov eax, [esp+24] /* prepare for dist compare */
+ neg ecx /* nbytes = -nbytes */
+ mov esi, [esp+32] /* from = window */
+
+ cmp eax, ebp
+ jb L_invalid_distance_too_far /* if (dist > wsize) */
+
+ add ecx, ebp /* nbytes = dist - nbytes */
+ cmp dword ptr [esp+28], 0
+ jne L_wrap_around_window /* if (write != 0) */
+
+ sub eax, ecx
+ add esi, eax /* from += wsize - nbytes */
+
+ mov eax, [esp+60]
+ cmp eax, ecx
+ jbe L_do_copy1 /* if (nbytes >= len) */
+
+ sub eax, ecx /* len -= nbytes */
+ rep movsb
+ mov esi, edi
+ sub esi, ebp /* from = out - dist */
+ jmp L_do_copy1
+
+ cmp eax, ecx
+ jbe L_do_copy1 /* if (nbytes >= len) */
+
+ sub eax, ecx /* len -= nbytes */
+ rep movsb
+ mov esi, edi
+ sub esi, ebp /* from = out - dist */
+ jmp L_do_copy1
+
+ L_wrap_around_window:
+ mov eax, [esp+28]
+ cmp ecx, eax
+ jbe L_contiguous_in_window /* if (write >= nbytes) */
+
+ add esi, [esp+24]
+ add esi, eax
+ sub esi, ecx /* from += wsize + write - nbytes */
+ sub ecx, eax /* nbytes -= write */
+
+ mov eax, [esp+60]
+ cmp eax, ecx
+ jbe L_do_copy1 /* if (nbytes >= len) */
+
+ sub eax, ecx /* len -= nbytes */
+ rep movsb
+ mov esi, [esp+32] /* from = window */
+ mov ecx, [esp+28] /* nbytes = write */
+ cmp eax, ecx
+ jbe L_do_copy1 /* if (nbytes >= len) */
+
+ sub eax, ecx /* len -= nbytes */
+ rep movsb
+ mov esi, edi
+ sub esi, ebp /* from = out - dist */
+ jmp L_do_copy1
+
+ L_contiguous_in_window:
+ add esi, eax
+ sub esi, ecx /* from += write - nbytes */
+
+ mov eax, [esp+60]
+ cmp eax, ecx
+ jbe L_do_copy1 /* if (nbytes >= len) */
+
+ sub eax, ecx /* len -= nbytes */
+ rep movsb
+ mov esi, edi
+ sub esi, ebp /* from = out - dist */
+
+ L_do_copy1:
+ mov ecx, eax
+ rep movsb
+
+ mov esi, [esp+4] /* move in back to %esi, toss from */
+ mov ebp, [esp+44] /* ebp = lcode */
+ jmp L_while_test
+
+ L_test_for_end_of_block:
+ test al, 32
+ jz L_invalid_literal_length_code
+ mov dword ptr [esp+68], 1
+ jmp L_break_loop_with_status
+
+ L_invalid_literal_length_code:
+ mov dword ptr [esp+68], 2
+ jmp L_break_loop_with_status
+
+ L_invalid_distance_code:
+ mov dword ptr [esp+68], 3
+ jmp L_break_loop_with_status
+
+ L_invalid_distance_too_far:
+ mov esi, [esp+4]
+ mov dword ptr [esp+68], 4
+ jmp L_break_loop_with_status
+
+ L_break_loop:
+ mov dword ptr [esp+68], 0
+
+ L_break_loop_with_status:
+ /* put in, out, bits, and hold back into ar and pop esp */
+ mov [esp+4], esi
+ mov [esp+12], edi
+ mov [esp+40], ebx
+ mov [esp+36], edx
+ mov esp, [esp]
+ pop ebp
+ popfd
+ }
+ #endif
+
+ if (ar.status > 1) {
+ if (ar.status == 2)
+ strm->msg = "invalid literal/length code";
+ else if (ar.status == 3)
+ strm->msg = "invalid distance code";
+ else
+ strm->msg = "invalid distance too far back";
+ state->mode = BAD;
+ }
+ else if ( ar.status == 1 ) {
+ state->mode = TYPE;
+ }
+
+ /* return unused bytes (on entry, bits < 8, so in won't go too far back) */
+ ar.len = ar.bits >> 3;
+ ar.in -= ar.len;
+ ar.bits -= ar.len << 3;
+ ar.hold &= (1U << ar.bits) - 1;
+
+ /* update state and return */
+ strm->next_in = ar.in;
+ strm->next_out = ar.out;
+ strm->avail_in = (unsigned)(ar.in < ar.last ? 5 + (ar.last - ar.in) :
+ 5 - (ar.in - ar.last));
+ strm->avail_out = (unsigned)(ar.out < ar.end ? 257 + (ar.end - ar.out) :
+ 257 - (ar.out - ar.end));
+ state->hold = ar.hold;
+ state->bits = ar.bits;
+ return;
+ }
+
Index: llvm/runtime/zlib/contrib/inflate86/inffast.S
diff -c /dev/null llvm/runtime/zlib/contrib/inflate86/inffast.S:1.1.2.1
*** /dev/null Mon Mar 1 17:58:58 2004
--- llvm/runtime/zlib/contrib/inflate86/inffast.S Mon Mar 1 17:58:48 2004
***************
*** 0 ****
--- 1,1377 ----
+ /*
+ * inffast.S is a hand tuned assembler version of:
+ *
+ * inffast.c -- fast decoding
+ * Copyright (C) 1995-2003 Mark Adler
+ * For conditions of distribution and use, see copyright notice in zlib.h
+ *
+ * Copyright (C) 2003 Chris Anderson <christop@charm.net>
+ * Please use the copyright conditions above.
+ *
+ * This version (Jan-23-2003) of inflate_fast was coded and tested under
+ * GNU/Linux on a pentium 3, using the gcc-3.2 compiler distribution. On that
+ * machine, I found that gzip style archives decompressed about 20% faster than
+ * the gcc-3.2 -O3 -fomit-frame-pointer compiled version. Your results will
+ * depend on how large a buffer is used for z_stream.next_in & next_out
+ * (8K-32K worked best for my 256K cpu cache) and how much overhead there is in
+ * stream processing I/O and crc32/adler32. In my case, this routine used
+ * 70% of the cpu time and crc32 used 20%.
+ *
+ * I am confident that this version will work in the general case, but I have
+ * not tested a wide variety of datasets or a wide variety of platforms.
+ *
+ * Jan-24-2003 -- Added -DUSE_MMX define for slightly faster inflating.
+ * It should be a runtime flag instead of a compile-time flag...
+ *
+ * Jan-26-2003 -- Added runtime check for MMX support with cpuid instruction.
+ * With -DUSE_MMX, only MMX code is compiled. With -DNO_MMX, only non-MMX code
+ * is compiled. Without either option, runtime detection is enabled. Runtime
+ * detection should work on all modern cpus and the recommended algorithm (flip
+ * ID bit on eflags and then use the cpuid instruction) is used in many
+ * multimedia applications. Tested under win2k with gcc-2.95 and gas-2.12
+ * distributed with cygwin3. Compiling with gcc-2.95 -c inffast.S -o
+ * inffast.obj generates a COFF object which can then be linked with MSVC++
+ * compiled code. Tested under FreeBSD 4.7 with gcc-2.95.
+ *
+ * Jan-28-2003 -- Tested Athlon XP... MMX mode is slower than no MMX (and
+ * slower than compiler generated code). Adjusted cpuid check to use the MMX
+ * code only for Pentiums < P4 until I have more data on the P4. Speed
+ * improvement is only about 15% on the Athlon when compared with code generated
+ * with MSVC++. Not sure yet, but I think the P4 will also be slower using the
+ * MMX mode because many of its x86 ALU instructions execute in .5 cycles and
+ * have less latency than MMX ops. Added code to buffer the last 11 bytes of
+ * the input stream since the MMX code grabs bits in chunks of 32, which
+ * differs from the inffast.c algorithm. I don't think there would have been
+ * read overruns where a page boundary was crossed (a segfault), but there
+ * could have been overruns when next_in ends on unaligned memory (uninitialized
+ * memory read).
+ *
+ * Mar-13-2003 -- P4 MMX is slightly slower than P4 NO_MMX. I created a C
+ * version of the non-MMX code so that it doesn't depend on zstrm and zstate
+ * structure offsets which are hard coded in this file. This was last tested
+ * with zlib-1.2.0 which is currently in beta testing, newer versions of this
+ * and inffas86.c can be found at http://www.eetbeetee.com/zlib/ and
+ * http://www.charm.net/~christop/zlib/
+ */
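The Jan-28-2003 note above is the reason for the 12-byte buf local used further down: when 11 or fewer input bytes remain, the MMX path copies them into a zero-padded scratch buffer so its 32-bit loads can never read past next_in. A hypothetical stand-alone C equivalent of that tail-buffering step (names are illustrative, this is not the code in this file):

    #include <string.h>

    /* Return a pointer that is safe to over-read by a few bytes: if fewer
     * than 12 input bytes remain, decode from a zero-padded scratch copy. */
    static const unsigned char *buffer_tail(const unsigned char *next_in,
                                            unsigned avail_in,
                                            unsigned char buf[12])
    {
        if (avail_in >= 12)
            return next_in;             /* enough input, read it in place */
        memset(buf, 0, 12);             /* zero-pad so over-reads are harmless */
        memcpy(buf, next_in, avail_in); /* keep the real trailing bytes */
        return buf;                     /* decode from the scratch copy */
    }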
+
+
+ /*
+ * if you have underscore linking problems (_inflate_fast undefined), try
+ * using -DGAS_COFF
+ */
+ #if ! defined( GAS_COFF ) && ! defined( GAS_ELF )
+
+ #if defined( WIN32 ) || defined( __CYGWIN__ )
+ #define GAS_COFF /* windows object format */
+ #else
+ #define GAS_ELF
+ #endif
+
+ #endif /* ! GAS_COFF && ! GAS_ELF */
+
+
+ #if defined( GAS_COFF )
+
+ /* coff externals have underscores */
+ #define inflate_fast _inflate_fast
+ #define inflate_fast_use_mmx _inflate_fast_use_mmx
+
+ #endif /* GAS_COFF */
+
+
+ .file "inffast.S"
+
+ .globl inflate_fast
+
+ .text
+ .align 4,0
+ .L_invalid_literal_length_code_msg:
+ .string "invalid literal/length code"
+
+ .align 4,0
+ .L_invalid_distance_code_msg:
+ .string "invalid distance code"
+
+ .align 4,0
+ .L_invalid_distance_too_far_msg:
+ .string "invalid distance too far back"
+
+ #if ! defined( NO_MMX )
+ .align 4,0
+ .L_mask: /* mask[N] = ( 1 << N ) - 1 */
+ .long 0
+ .long 1
+ .long 3
+ .long 7
+ .long 15
+ .long 31
+ .long 63
+ .long 127
+ .long 255
+ .long 511
+ .long 1023
+ .long 2047
+ .long 4095
+ .long 8191
+ .long 16383
+ .long 32767
+ .long 65535
+ .long 131071
+ .long 262143
+ .long 524287
+ .long 1048575
+ .long 2097151
+ .long 4194303
+ .long 8388607
+ .long 16777215
+ .long 33554431
+ .long 67108863
+ .long 134217727
+ .long 268435455
+ .long 536870911
+ .long 1073741823
+ .long 2147483647
+ .long 4294967295
+ #endif /* NO_MMX */
+
+ .text
+
+ /*
+ * struct z_stream offsets, in zlib.h
+ */
+ #define next_in_strm 0 /* strm->next_in */
+ #define avail_in_strm 4 /* strm->avail_in */
+ #define next_out_strm 12 /* strm->next_out */
+ #define avail_out_strm 16 /* strm->avail_out */
+ #define msg_strm 24 /* strm->msg */
+ #define state_strm 28 /* strm->state */
+
+ /*
+ * struct inflate_state offsets, in inflate.h
+ */
+ #define mode_state 0 /* state->mode */
+ #define wsize_state 32 /* state->wsize */
+ #define write_state 40 /* state->write */
+ #define window_state 44 /* state->window */
+ #define hold_state 48 /* state->hold */
+ #define bits_state 52 /* state->bits */
+ #define lencode_state 68 /* state->lencode */
+ #define distcode_state 72 /* state->distcode */
+ #define lenbits_state 76 /* state->lenbits */
+ #define distbits_state 80 /* state->distbits */
+
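The offsets above are hard-coded against the 32-bit zlib-1.2.0 layouts of z_stream and struct inflate_state (see the Mar-13-2003 note in the header). If those headers are on hand, a compile-time cross-check along the following lines can catch a layout change; this is a hypothetical sketch using C11 _Static_assert and the 1.2.0 field names, not something the file itself does:

    #include <stddef.h>
    #include "zutil.h"
    #include "inftrees.h"
    #include "inflate.h"   /* internal zlib header that defines struct inflate_state */

    /* Spot-check a few of the hand-coded offsets (values mirror the defines above,
     * assuming a 32-bit build). */
    _Static_assert(offsetof(z_stream, next_in)   == 0,  "next_in_strm");
    _Static_assert(offsetof(z_stream, avail_in)  == 4,  "avail_in_strm");
    _Static_assert(offsetof(z_stream, next_out)  == 12, "next_out_strm");
    _Static_assert(offsetof(z_stream, avail_out) == 16, "avail_out_strm");
    _Static_assert(offsetof(z_stream, msg)       == 24, "msg_strm");
    _Static_assert(offsetof(z_stream, state)     == 28, "state_strm");
    _Static_assert(offsetof(struct inflate_state, wsize) == 32, "wsize_state");
    _Static_assert(offsetof(struct inflate_state, bits)  == 52, "bits_state");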
+ /*
+ * inflate_fast's activation record
+ */
+ #define local_var_size 64 /* how much local space for vars */
+ #define strm_sp 88 /* first arg: z_stream * (local_var_size + 24) */
+ #define start_sp 92 /* second arg: unsigned int (local_var_size + 28) */
+
+ /*
+ * offsets for local vars on stack
+ */
+ #define out 60 /* unsigned char* */
+ #define window 56 /* unsigned char* */
+ #define wsize 52 /* unsigned int */
+ #define write 48 /* unsigned int */
+ #define in 44 /* unsigned char* */
+ #define beg 40 /* unsigned char* */
+ #define buf 28 /* char[ 12 ] */
+ #define len 24 /* unsigned int */
+ #define last 20 /* unsigned char* */
+ #define end 16 /* unsigned char* */
+ #define dcode 12 /* code* */
+ #define lcode 8 /* code* */
+ #define dmask 4 /* unsigned int */
+ #define lmask 0 /* unsigned int */
+
+ /*
+ * typedef enum inflate_mode consts, in inflate.h
+ */
+ #ifndef NO_GUNZIP
+ #define GUNZIP
+ #endif
+
+ #ifdef GUNZIP
+ #define INFLATE_MODE_TYPE 11 /* state->mode flags enum-ed in inflate.h */
+ #define INFLATE_MODE_BAD 26
+ #else
+ #define INFLATE_MODE_TYPE 3
+ #define INFLATE_MODE_BAD 17
+ #endif
+
+
+ #if ! defined( USE_MMX ) && ! defined( NO_MMX )
+
+ #define RUN_TIME_MMX
+
+ #define CHECK_MMX 1
+ #define DO_USE_MMX 2
+ #define DONT_USE_MMX 3
+
+ .globl inflate_fast_use_mmx
+
+ .data
+
+ .align 4,0
+ inflate_fast_use_mmx: /* integer flag for run time control 1=check,2=mmx,3=no */
+ .long CHECK_MMX
+
+ #if defined( GAS_ELF )
+ /* elf info */
+ .type inflate_fast_use_mmx,@object
+ .size inflate_fast_use_mmx,4
+ #endif
+
+ #endif /* RUN_TIME_MMX */
+
+ #if defined( GAS_COFF )
+ /* coff info: scl 2 = extern, type 32 = function */
+ .def inflate_fast; .scl 2; .type 32; .endef
+ #endif
+
+ .text
+
+ .align 32,0x90
+ inflate_fast:
+ pushl %edi
+ pushl %esi
+ pushl %ebp
+ pushl %ebx
+ pushf /* save eflags (strm_sp, start_sp assume this is 32 bits) */
+ subl $local_var_size, %esp
+ cld
+
+ #define strm_r %esi
+ #define state_r %edi
+
+ movl strm_sp(%esp), strm_r
+ movl state_strm(strm_r), state_r
+
+ /* in = strm->next_in;
+ * out = strm->next_out;
+ * last = in + strm->avail_in - 11;
+ * beg = out - (start - strm->avail_out);
+ * end = out + (strm->avail_out - 257);
+ */
+ movl avail_in_strm(strm_r), %edx
+ movl next_in_strm(strm_r), %eax
+
+ addl %eax, %edx /* avail_in += next_in */
+ subl $11, %edx /* avail_in -= 11 */
+
+ movl %eax, in(%esp)
+ movl %edx, last(%esp)
+
+ movl start_sp(%esp), %ebp
+ movl avail_out_strm(strm_r), %ecx
+ movl next_out_strm(strm_r), %ebx
+
+ subl %ecx, %ebp /* start -= avail_out */
+ negl %ebp /* start = -start */
+ addl %ebx, %ebp /* start += next_out */
+
+ subl $257, %ecx /* avail_out -= 257 */
+ addl %ebx, %ecx /* avail_out += out */
+
+ movl %ebx, out(%esp)
+ movl %ebp, beg(%esp)
+ movl %ecx, end(%esp)
+
+ /* wsize = state->wsize;
+ * write = state->write;
+ * window = state->window;
+ * hold = state->hold;
+ * bits = state->bits;
+ * lcode = state->lencode;
+ * dcode = state->distcode;
+ * lmask = ( 1 << state->lenbits ) - 1;
+ * dmask = ( 1 << state->distbits ) - 1;
+ */
+
+ movl lencode_state(state_r), %eax
+ movl distcode_state(state_r), %ecx
+
+ movl %eax, lcode(%esp)
+ movl %ecx, dcode(%esp)
+
+ movl $1, %eax
+ movl lenbits_state(state_r), %ecx
+ shll %cl, %eax
+ decl %eax
+ movl %eax, lmask(%esp)
+
+ movl $1, %eax
+ movl distbits_state(state_r), %ecx
+ shll %cl, %eax
+ decl %eax
+ movl %eax, dmask(%esp)
+
+ movl wsize_state(state_r), %eax
+ movl write_state(state_r), %ecx
+ movl window_state(state_r), %edx
+
+ movl %eax, wsize(%esp)
+ movl %ecx, write(%esp)
+ movl %edx, window(%esp)
+
+ movl hold_state(state_r), %ebp
+ movl bits_state(state_r), %ebx
+
+ #undef strm_r
+ #undef state_r
+
+ #define in_r %esi
+ #define from_r %esi
+ #define out_r %edi
+
+ movl in(%esp), in_r
+ movl last(%esp), %ecx
+ cmpl in_r, %ecx
+ ja .L_align_long /* if in < last */
+
+ addl $11, %ecx /* ecx = &in[ avail_in ] */
+ subl in_r, %ecx /* ecx = avail_in */
+ movl $12, %eax
+ subl %ecx, %eax /* eax = 12 - avail_in */
+ leal buf(%esp), %edi
+ rep movsb /* memcpy( buf, in, avail_in ) */
+ movl %eax, %ecx
+ xorl %eax, %eax
+ rep stosb /* memset( &buf[ avail_in ], 0, 12 - avail_in ) */
+ leal buf(%esp), in_r /* in = buf */
+ movl in_r, last(%esp) /* last = in, do just one iteration */
+ jmp .L_is_aligned
+
+ /* align in_r on long boundary */
+ .L_align_long:
+ testl $3, in_r
+ jz .L_is_aligned
+ xorl %eax, %eax
+ movb (in_r), %al
+ incl in_r
+ movl %ebx, %ecx
+ addl $8, %ebx
+ shll %cl, %eax
+ orl %eax, %ebp
+ jmp .L_align_long
+
+ .L_is_aligned:
+ movl out(%esp), out_r
+
+ #if defined( NO_MMX )
+ jmp .L_do_loop
+ #endif
+
+ #if defined( USE_MMX )
+ jmp .L_init_mmx
+ #endif
+
+ /*** Runtime MMX check ***/
+
+ #if defined( RUN_TIME_MMX )
+ .L_check_mmx:
+ cmpl $DO_USE_MMX, inflate_fast_use_mmx
+ je .L_init_mmx
+ ja .L_do_loop /* > 2 */
+
+ pushl %eax
+ pushl %ebx
+ pushl %ecx
+ pushl %edx
+ pushf
+ movl (%esp), %eax /* copy eflags to eax */
+ xorl $0x200000, (%esp) /* try toggling ID bit of eflags (bit 21)
+ * to see if cpu supports cpuid...
+ * ID bit method not supported by NexGen but
+ * bios may load a cpuid instruction and
+ * cpuid may be disabled on Cyrix 5-6x86 */
+ popf
+ pushf
+ popl %edx /* copy new eflags to edx */
+ xorl %eax, %edx /* test if ID bit is flipped */
+ jz .L_dont_use_mmx /* not flipped if zero */
+ xorl %eax, %eax
+ cpuid
+ cmpl $0x756e6547, %ebx /* check for GenuineIntel in ebx,ecx,edx */
+ jne .L_dont_use_mmx
+ cmpl $0x6c65746e, %ecx
+ jne .L_dont_use_mmx
+ cmpl $0x49656e69, %edx
+ jne .L_dont_use_mmx
+ movl $1, %eax
+ cpuid /* get cpu features */
+ shrl $8, %eax
+ andl $15, %eax
+ cmpl $6, %eax /* check for Pentium family, is 0xf for P4 */
+ jne .L_dont_use_mmx
+ testl $0x800000, %edx /* test if MMX feature is set (bit 23) */
+ jnz .L_use_mmx
+ jmp .L_dont_use_mmx
+ .L_use_mmx:
+ movl $DO_USE_MMX, inflate_fast_use_mmx
+ jmp .L_check_mmx_pop
+ .L_dont_use_mmx:
+ movl $DONT_USE_MMX, inflate_fast_use_mmx
+ .L_check_mmx_pop:
+ popl %edx
+ popl %ecx
+ popl %ebx
+ popl %eax
+ jmp .L_check_mmx
+ #endif
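The block above confirms that CPUID exists by toggling the EFLAGS ID bit, checks for a GenuineIntel family-6 part, and then tests the MMX feature flag (bit 23 of EDX from CPUID leaf 1). With a current GCC or Clang the same feature query can be written in C via <cpuid.h>; a hypothetical sketch, not used by this file, and without the family-6 restriction the assembly applies:

    #include <stdio.h>
    #include <cpuid.h>   /* GCC/Clang helper; bit_MMX is bit 23 of EDX */

    /* Report whether CPUID leaf 1 advertises MMX, as the assembly above tests. */
    static int cpu_has_mmx(void)
    {
        unsigned int eax, ebx, ecx, edx;
        if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
            return 0;                /* CPUID leaf 1 not available */
        return (edx & bit_MMX) != 0;
    }

    int main(void)
    {
        printf("MMX %ssupported\n", cpu_has_mmx() ? "" : "not ");
        return 0;
    }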
+
+
+ /*** Non-MMX code ***/
+
+ #if defined ( NO_MMX ) || defined( RUN_TIME_MMX )
+
+ #define hold_r %ebp
+ #define bits_r %bl
+ #define bitslong_r %ebx
+
+ .align 32,0x90
+ .L_while_test:
+ /* while (in < last && out < end)
+ */
+ cmpl out_r, end(%esp)
+ jbe .L_break_loop /* if (out >= end) */
+
+ cmpl in_r, last(%esp)
+ jbe .L_break_loop
+
+ .L_do_loop:
+ /* regs: %esi = in, %ebp = hold, %bl = bits, %edi = out
+ *
+ * do {
+ * if (bits < 15) {
+ * hold |= *((unsigned short *)in)++ << bits;
+ * bits += 16
+ * }
+ * this = lcode[hold & lmask]
+ */
+ cmpb $15, bits_r
+ ja .L_get_length_code /* if (15 < bits) */
+
+ xorl %eax, %eax
+ lodsw /* al = *(ushort *)in++ */
+ movb bits_r, %cl /* cl = bits, needs it for shifting */
+ addb $16, bits_r /* bits += 16 */
+ shll %cl, %eax
+ orl %eax, hold_r /* hold |= *((ushort *)in)++ << bits */
+
+ .L_get_length_code:
+ movl lmask(%esp), %edx /* edx = lmask */
+ movl lcode(%esp), %ecx /* ecx = lcode */
+ andl hold_r, %edx /* edx &= hold */
+ movl (%ecx,%edx,4), %eax /* eax = lcode[hold & lmask] */
+
+ .L_dolen:
+ /* regs: %esi = in, %ebp = hold, %bl = bits, %edi = out
+ *
+ * dolen:
+ * bits -= this.bits;
+ * hold >>= this.bits
+ */
+ movb %ah, %cl /* cl = this.bits */
+ subb %ah, bits_r /* bits -= this.bits */
+ shrl %cl, hold_r /* hold >>= this.bits */
+
+ /* check if op is a literal
+ * if (op == 0) {
+ * PUP(out) = this.val;
+ * }
+ */
+ testb %al, %al
+ jnz .L_test_for_length_base /* if (op != 0) 45.7% */
+
+ shrl $16, %eax /* output this.val char */
+ stosb
+ jmp .L_while_test
+
+ .L_test_for_length_base:
+ /* regs: %esi = in, %ebp = hold, %bl = bits, %edi = out, %edx = len
+ *
+ * else if (op & 16) {
+ * len = this.val
+ * op &= 15
+ * if (op) {
+ * if (op > bits) {
+ * hold |= *((unsigned short *)in)++ << bits;
+ * bits += 16
+ * }
+ * len += hold & mask[op];
+ * bits -= op;
+ * hold >>= op;
+ * }
+ */
+ #define len_r %edx
+ movl %eax, len_r /* len = this */
+ shrl $16, len_r /* len = this.val */
+ movb %al, %cl
+
+ testb $16, %al
+ jz .L_test_for_second_level_length /* if ((op & 16) == 0) 8% */
+ andb $15, %cl /* op &= 15 */
+ jz .L_save_len /* if (!op) */
+ cmpb %cl, bits_r
+ jae .L_add_bits_to_len /* if (op <= bits) */
+
+ movb %cl, %ch /* stash op in ch, freeing cl */
+ xorl %eax, %eax
+ lodsw /* al = *(ushort *)in++ */
+ movb bits_r, %cl /* cl = bits, needs it for shifting */
+ addb $16, bits_r /* bits += 16 */
+ shll %cl, %eax
+ orl %eax, hold_r /* hold |= *((ushort *)in)++ << bits */
+ movb %ch, %cl /* move op back to ecx */
+
+ .L_add_bits_to_len:
+ movl $1, %eax
+ shll %cl, %eax
+ decl %eax
+ subb %cl, bits_r
+ andl hold_r, %eax /* eax &= hold */
+ shrl %cl, hold_r
+ addl %eax, len_r /* len += hold & mask[op] */
+
+ .L_save_len:
+ movl len_r, len(%esp) /* save len */
+ #undef len_r
+
+ .L_decode_distance:
+ /* regs: %esi = in, %ebp = hold, %bl = bits, %edi = out, %edx = dist
+ *
+ * if (bits < 15) {
+ * hold |= *((unsigned short *)in)++ << bits;
+ * bits += 16
+ * }
+ * this = dcode[hold & dmask];
+ * dodist:
+ * bits -= this.bits;
+ * hold >>= this.bits;
+ * op = this.op;
+ */
+
+ cmpb $15, bits_r
+ ja .L_get_distance_code /* if (15 < bits) */
+
+ xorl %eax, %eax
+ lodsw /* al = *(ushort *)in++ */
+ movb bits_r, %cl /* cl = bits, needs it for shifting */
+ addb $16, bits_r /* bits += 16 */
+ shll %cl, %eax
+ orl %eax, hold_r /* hold |= *((ushort *)in)++ << bits */
+
+ .L_get_distance_code:
+ movl dmask(%esp), %edx /* edx = dmask */
+ movl dcode(%esp), %ecx /* ecx = dcode */
+ andl hold_r, %edx /* edx &= hold */
+ movl (%ecx,%edx,4), %eax /* eax = dcode[hold & dmask] */
+
+ #define dist_r %edx
+ .L_dodist:
+ movl %eax, dist_r /* dist = this */
+ shrl $16, dist_r /* dist = this.val */
+ movb %ah, %cl
+ subb %ah, bits_r /* bits -= this.bits */
+ shrl %cl, hold_r /* hold >>= this.bits */
+
+ /* if (op & 16) {
+ * dist = this.val
+ * op &= 15
+ * if (op > bits) {
+ * hold |= *((unsigned short *)in)++ << bits;
+ * bits += 16
+ * }
+ * dist += hold & mask[op];
+ * bits -= op;
+ * hold >>= op;
+ */
+ movb %al, %cl /* cl = this.op */
+
+ testb $16, %al /* if ((op & 16) == 0) */
+ jz .L_test_for_second_level_dist
+ andb $15, %cl /* op &= 15 */
+ jz .L_check_dist_one
+ cmpb %cl, bits_r
+ jae .L_add_bits_to_dist /* if (op <= bits) 97.6% */
+
+ movb %cl, %ch /* stash op in ch, freeing cl */
+ xorl %eax, %eax
+ lodsw /* al = *(ushort *)in++ */
+ movb bits_r, %cl /* cl = bits, needs it for shifting */
+ addb $16, bits_r /* bits += 16 */
+ shll %cl, %eax
+ orl %eax, hold_r /* hold |= *((ushort *)in)++ << bits */
+ movb %ch, %cl /* move op back to ecx */
+
+ .L_add_bits_to_dist:
+ movl $1, %eax
+ shll %cl, %eax
+ decl %eax /* (1 << op) - 1 */
+ subb %cl, bits_r
+ andl hold_r, %eax /* eax &= hold */
+ shrl %cl, hold_r
+ addl %eax, dist_r /* dist += hold & ((1 << op) - 1) */
+ jmp .L_check_window
+
+ .L_check_window:
+ /* regs: %esi = from, %ebp = hold, %bl = bits, %edi = out, %edx = dist
+ * %ecx = nbytes
+ *
+ * nbytes = out - beg;
+ * if (dist <= nbytes) {
+ * from = out - dist;
+ * do {
+ * PUP(out) = PUP(from);
+ * } while (--len > 0);
+ * }
+ */
+
+ movl in_r, in(%esp) /* save in so from can use its reg */
+ movl out_r, %eax
+ subl beg(%esp), %eax /* nbytes = out - beg */
+
+ cmpl dist_r, %eax
+ jb .L_clip_window /* if (dist > nbytes) 4.2% */
+
+ movl len(%esp), %ecx
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+
+ subl $3, %ecx
+ movb (from_r), %al
+ movb %al, (out_r)
+ movb 1(from_r), %al
+ movb 2(from_r), %dl
+ addl $3, from_r
+ movb %al, 1(out_r)
+ movb %dl, 2(out_r)
+ addl $3, out_r
+ rep movsb
+
+ movl in(%esp), in_r /* move in back to %esi, toss from */
+ jmp .L_while_test
+
+ .align 16,0x90
+ .L_check_dist_one:
+ cmpl $1, dist_r
+ jne .L_check_window
+ cmpl out_r, beg(%esp)
+ je .L_check_window
+
+ decl out_r
+ movl len(%esp), %ecx
+ movb (out_r), %al
+ subl $3, %ecx
+
+ movb %al, 1(out_r)
+ movb %al, 2(out_r)
+ movb %al, 3(out_r)
+ addl $4, out_r
+ rep stosb
+
+ jmp .L_while_test
+
+ .align 16,0x90
+ .L_test_for_second_level_length:
+ /* else if ((op & 64) == 0) {
+ * this = lcode[this.val + (hold & mask[op])];
+ * }
+ */
+ testb $64, %al
+ jnz .L_test_for_end_of_block /* if ((op & 64) != 0) */
+
+ movl $1, %eax
+ shll %cl, %eax
+ decl %eax
+ andl hold_r, %eax /* eax &= hold */
+ addl %edx, %eax /* eax += this.val */
+ movl lcode(%esp), %edx /* edx = lcode */
+ movl (%edx,%eax,4), %eax /* eax = lcode[val + (hold&mask[op])] */
+ jmp .L_dolen
+
+ .align 16,0x90
+ .L_test_for_second_level_dist:
+ /* else if ((op & 64) == 0) {
+ * this = dcode[this.val + (hold & mask[op])];
+ * }
+ */
+ testb $64, %al
+ jnz .L_invalid_distance_code /* if ((op & 64) != 0) */
+
+ movl $1, %eax
+ shll %cl, %eax
+ decl %eax
+ andl hold_r, %eax /* eax &= hold */
+ addl %edx, %eax /* eax += this.val */
+ movl dcode(%esp), %edx /* edx = dcode */
+ movl (%edx,%eax,4), %eax /* eax = dcode[val + (hold&mask[op])] */
+ jmp .L_dodist
+
+ .align 16,0x90
+ .L_clip_window:
+ /* regs: %esi = from, %ebp = hold, %bl = bits, %edi = out, %edx = dist
+ * %ecx = nbytes
+ *
+ * else {
+ * if (dist > wsize) {
+ * invalid distance
+ * }
+ * from = window;
+ * nbytes = dist - nbytes;
+ * if (write == 0) {
+ * from += wsize - nbytes;
+ */
+ #define nbytes_r %ecx
+ movl %eax, nbytes_r
+ movl wsize(%esp), %eax /* prepare for dist compare */
+ negl nbytes_r /* nbytes = -nbytes */
+ movl window(%esp), from_r /* from = window */
+
+ cmpl dist_r, %eax
+ jb .L_invalid_distance_too_far /* if (dist > wsize) */
+
+ addl dist_r, nbytes_r /* nbytes = dist - nbytes */
+ cmpl $0, write(%esp)
+ jne .L_wrap_around_window /* if (write != 0) */
+
+ subl nbytes_r, %eax
+ addl %eax, from_r /* from += wsize - nbytes */
+
+ /* regs: %esi = from, %ebp = hold, %bl = bits, %edi = out, %edx = dist
+ * %ecx = nbytes, %eax = len
+ *
+ * if (nbytes < len) {
+ * len -= nbytes;
+ * do {
+ * PUP(out) = PUP(from);
+ * } while (--nbytes);
+ * from = out - dist;
+ * }
+ * }
+ */
+ #define len_r %eax
+ movl len(%esp), len_r
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1 /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+ jmp .L_do_copy1
+
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1 /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+ jmp .L_do_copy1
+
+ .L_wrap_around_window:
+ /* regs: %esi = from, %ebp = hold, %bl = bits, %edi = out, %edx = dist
+ * %ecx = nbytes, %eax = write, %eax = len
+ *
+ * else if (write < nbytes) {
+ * from += wsize + write - nbytes;
+ * nbytes -= write;
+ * if (nbytes < len) {
+ * len -= nbytes;
+ * do {
+ * PUP(out) = PUP(from);
+ * } while (--nbytes);
+ * from = window;
+ * nbytes = write;
+ * if (nbytes < len) {
+ * len -= nbytes;
+ * do {
+ * PUP(out) = PUP(from);
+ * } while(--nbytes);
+ * from = out - dist;
+ * }
+ * }
+ * }
+ */
+ #define write_r %eax
+ movl write(%esp), write_r
+ cmpl write_r, nbytes_r
+ jbe .L_contiguous_in_window /* if (write >= nbytes) */
+
+ addl wsize(%esp), from_r
+ addl write_r, from_r
+ subl nbytes_r, from_r /* from += wsize + write - nbytes */
+ subl write_r, nbytes_r /* nbytes -= write */
+ #undef write_r
+
+ movl len(%esp), len_r
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1 /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl window(%esp), from_r /* from = window */
+ movl write(%esp), nbytes_r /* nbytes = write */
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1 /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+ jmp .L_do_copy1
+
+ .L_contiguous_in_window:
+ /* regs: %esi = from, %ebp = hold, %bl = bits, %edi = out, %edx = dist
+ * %ecx = nbytes, %eax = write, %eax = len
+ *
+ * else {
+ * from += write - nbytes;
+ * if (nbytes < len) {
+ * len -= nbytes;
+ * do {
+ * PUP(out) = PUP(from);
+ * } while (--nbytes);
+ * from = out - dist;
+ * }
+ * }
+ */
+ #define write_r %eax
+ addl write_r, from_r
+ subl nbytes_r, from_r /* from += write - nbytes */
+ #undef write_r
+
+ movl len(%esp), len_r
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1 /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+
+ .L_do_copy1:
+ /* regs: %esi = from, %esi = in, %ebp = hold, %bl = bits, %edi = out
+ * %eax = len
+ *
+ * while (len > 0) {
+ * PUP(out) = PUP(from);
+ * len--;
+ * }
+ * }
+ * } while (in < last && out < end);
+ */
+ #undef nbytes_r
+ #define in_r %esi
+ movl len_r, %ecx
+ rep movsb
+
+ movl in(%esp), in_r /* move in back to %esi, toss from */
+ jmp .L_while_test
+
+ #undef len_r
+ #undef dist_r
+
+ #endif /* NO_MMX || RUN_TIME_MMX */
+
+
+ /*** MMX code ***/
+
+ #if defined( USE_MMX ) || defined( RUN_TIME_MMX )
+
+ .align 32,0x90
+ .L_init_mmx:
+ emms
+
+ #undef bits_r
+ #undef bitslong_r
+ #define bitslong_r %ebp
+ #define hold_mm %mm0
+ movd %ebp, hold_mm
+ movl %ebx, bitslong_r
+
+ #define used_mm %mm1
+ #define dmask2_mm %mm2
+ #define lmask2_mm %mm3
+ #define lmask_mm %mm4
+ #define dmask_mm %mm5
+ #define tmp_mm %mm6
+
+ movd lmask(%esp), lmask_mm
+ movq lmask_mm, lmask2_mm
+ movd dmask(%esp), dmask_mm
+ movq dmask_mm, dmask2_mm
+ pxor used_mm, used_mm
+ movl lcode(%esp), %ebx /* ebx = lcode */
+ jmp .L_do_loop_mmx
+
+ .align 32,0x90
+ .L_while_test_mmx:
+ /* while (in < last && out < end)
+ */
+ cmpl out_r, end(%esp)
+ jbe .L_break_loop /* if (out >= end) */
+
+ cmpl in_r, last(%esp)
+ jbe .L_break_loop
+
+ .L_do_loop_mmx:
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+
+ cmpl $32, bitslong_r
+ ja .L_get_length_code_mmx /* if (32 < bits) */
+
+ movd bitslong_r, tmp_mm
+ movd (in_r), %mm7
+ addl $4, in_r
+ psllq tmp_mm, %mm7
+ addl $32, bitslong_r
+ por %mm7, hold_mm /* hold_mm |= *((uint *)in)++ << bits */
+
+ .L_get_length_code_mmx:
+ pand hold_mm, lmask_mm
+ movd lmask_mm, %eax
+ movq lmask2_mm, lmask_mm
+ movl (%ebx,%eax,4), %eax /* eax = lcode[hold & lmask] */
+
+ .L_dolen_mmx:
+ movzbl %ah, %ecx /* ecx = this.bits */
+ movd %ecx, used_mm
+ subl %ecx, bitslong_r /* bits -= this.bits */
+
+ testb %al, %al
+ jnz .L_test_for_length_base_mmx /* if (op != 0) 45.7% */
+
+ shrl $16, %eax /* output this.val char */
+ stosb
+ jmp .L_while_test_mmx
+
+ .L_test_for_length_base_mmx:
+ #define len_r %edx
+ movl %eax, len_r /* len = this */
+ shrl $16, len_r /* len = this.val */
+
+ testb $16, %al
+ jz .L_test_for_second_level_length_mmx /* if ((op & 16) == 0) 8% */
+ andl $15, %eax /* op &= 15 */
+ jz .L_decode_distance_mmx /* if (!op) */
+
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+ movd %eax, used_mm
+ movd hold_mm, %ecx
+ subl %eax, bitslong_r
+ andl .L_mask(,%eax,4), %ecx
+ addl %ecx, len_r /* len += hold & mask[op] */
+
+ .L_decode_distance_mmx:
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+
+ cmpl $32, bitslong_r
+ ja .L_get_dist_code_mmx /* if (32 < bits) */
+
+ movd bitslong_r, tmp_mm
+ movd (in_r), %mm7
+ addl $4, in_r
+ psllq tmp_mm, %mm7
+ addl $32, bitslong_r
+ por %mm7, hold_mm /* hold_mm |= *((uint *)in)++ << bits */
+
+ .L_get_dist_code_mmx:
+ movl dcode(%esp), %ebx /* ebx = dcode */
+ pand hold_mm, dmask_mm
+ movd dmask_mm, %eax
+ movq dmask2_mm, dmask_mm
+ movl (%ebx,%eax,4), %eax /* eax = dcode[hold & dmask] */
+
+ .L_dodist_mmx:
+ #define dist_r %ebx
+ movzbl %ah, %ecx /* ecx = this.bits */
+ movl %eax, dist_r
+ shrl $16, dist_r /* dist = this.val */
+ subl %ecx, bitslong_r /* bits -= this.bits */
+ movd %ecx, used_mm
+
+ testb $16, %al /* if ((op & 16) == 0) */
+ jz .L_test_for_second_level_dist_mmx
+ andl $15, %eax /* op &= 15 */
+ jz .L_check_dist_one_mmx
+
+ .L_add_bits_to_dist_mmx:
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+ movd %eax, used_mm /* save bit length of current op */
+ movd hold_mm, %ecx /* get the next bits on input stream */
+ subl %eax, bitslong_r /* bits -= op bits */
+ andl .L_mask(,%eax,4), %ecx /* ecx = hold & mask[op] */
+ addl %ecx, dist_r /* dist += hold & mask[op] */
+
+ .L_check_window_mmx:
+ movl in_r, in(%esp) /* save in so from can use its reg */
+ movl out_r, %eax
+ subl beg(%esp), %eax /* nbytes = out - beg */
+
+ cmpl dist_r, %eax
+ jb .L_clip_window_mmx /* if (dist > nbytes) 4.2% */
+
+ movl len_r, %ecx
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+
+ subl $3, %ecx
+ movb (from_r), %al
+ movb %al, (out_r)
+ movb 1(from_r), %al
+ movb 2(from_r), %dl
+ addl $3, from_r
+ movb %al, 1(out_r)
+ movb %dl, 2(out_r)
+ addl $3, out_r
+ rep movsb
+
+ movl in(%esp), in_r /* move in back to %esi, toss from */
+ movl lcode(%esp), %ebx /* move lcode back to %ebx, toss dist */
+ jmp .L_while_test_mmx
+
+ .align 16,0x90
+ .L_check_dist_one_mmx:
+ cmpl $1, dist_r
+ jne .L_check_window_mmx
+ cmpl out_r, beg(%esp)
+ je .L_check_window_mmx
+
+ decl out_r
+ movl len_r, %ecx
+ movb (out_r), %al
+ subl $3, %ecx
+
+ movb %al, 1(out_r)
+ movb %al, 2(out_r)
+ movb %al, 3(out_r)
+ addl $4, out_r
+ rep stosb
+
+ movl lcode(%esp), %ebx /* move lcode back to %ebx, toss dist */
+ jmp .L_while_test_mmx
+
+ .align 16,0x90
+ .L_test_for_second_level_length_mmx:
+ testb $64, %al
+ jnz .L_test_for_end_of_block /* if ((op & 64) != 0) */
+
+ andl $15, %eax
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+ movd hold_mm, %ecx
+ andl .L_mask(,%eax,4), %ecx
+ addl len_r, %ecx
+ movl (%ebx,%ecx,4), %eax /* eax = lcode[this.val + (hold & mask[op])] */
+ jmp .L_dolen_mmx
+
+ .align 16,0x90
+ .L_test_for_second_level_dist_mmx:
+ testb $64, %al
+ jnz .L_invalid_distance_code /* if ((op & 64) != 0) */
+
+ andl $15, %eax
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+ movd hold_mm, %ecx
+ andl .L_mask(,%eax,4), %ecx
+ movl dcode(%esp), %eax /* eax = dcode */
+ addl dist_r, %ecx
+ movl (%eax,%ecx,4), %eax /* eax = dcode[this.val + (hold & mask[op])] */
+ jmp .L_dodist_mmx
+
+ .align 16,0x90
+ .L_clip_window_mmx:
+ #define nbytes_r %ecx
+ movl %eax, nbytes_r
+ movl wsize(%esp), %eax /* prepare for dist compare */
+ negl nbytes_r /* nbytes = -nbytes */
+ movl window(%esp), from_r /* from = window */
+
+ cmpl dist_r, %eax
+ jb .L_invalid_distance_too_far /* if (dist > wsize) */
+
+ addl dist_r, nbytes_r /* nbytes = dist - nbytes */
+ cmpl $0, write(%esp)
+ jne .L_wrap_around_window_mmx /* if (write != 0) */
+
+ subl nbytes_r, %eax
+ addl %eax, from_r /* from += wsize - nbytes */
+
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1_mmx /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+ jmp .L_do_copy1_mmx
+
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1_mmx /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+ jmp .L_do_copy1_mmx
+
+ .L_wrap_around_window_mmx:
+ #define write_r %eax
+ movl write(%esp), write_r
+ cmpl write_r, nbytes_r
+ jbe .L_contiguous_in_window_mmx /* if (write >= nbytes) */
+
+ addl wsize(%esp), from_r
+ addl write_r, from_r
+ subl nbytes_r, from_r /* from += wsize + write - nbytes */
+ subl write_r, nbytes_r /* nbytes -= write */
+ #undef write_r
+
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1_mmx /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl window(%esp), from_r /* from = window */
+ movl write(%esp), nbytes_r /* nbytes = write */
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1_mmx /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+ jmp .L_do_copy1_mmx
+
+ .L_contiguous_in_window_mmx:
+ #define write_r %eax
+ addl write_r, from_r
+ subl nbytes_r, from_r /* from += write - nbytes */
+ #undef write_r
+
+ cmpl nbytes_r, len_r
+ jbe .L_do_copy1_mmx /* if (nbytes >= len) */
+
+ subl nbytes_r, len_r /* len -= nbytes */
+ rep movsb
+ movl out_r, from_r
+ subl dist_r, from_r /* from = out - dist */
+
+ .L_do_copy1_mmx:
+ #undef nbytes_r
+ #define in_r %esi
+ movl len_r, %ecx
+ rep movsb
+
+ movl in(%esp), in_r /* move in back to %esi, toss from */
+ movl lcode(%esp), %ebx /* move lcode back to %ebx, toss dist */
+ jmp .L_while_test_mmx
+
+ #undef hold_r
+ #undef bitslong_r
+
+ #endif /* USE_MMX || RUN_TIME_MMX */
+
+
+ /*** USE_MMX, NO_MMX, and RUN_TIME_MMX from here on ***/
+
+ .L_invalid_distance_code:
+ /* else {
+ * strm->msg = "invalid distance code";
+ * state->mode = BAD;
+ * }
+ */
+ movl $.L_invalid_distance_code_msg, %ecx
+ movl $INFLATE_MODE_BAD, %edx
+ jmp .L_update_stream_state
+
+ .L_test_for_end_of_block:
+ /* else if (op & 32) {
+ * state->mode = TYPE;
+ * break;
+ * }
+ */
+ testb $32, %al
+ jz .L_invalid_literal_length_code /* if ((op & 32) == 0) */
+
+ movl $0, %ecx
+ movl $INFLATE_MODE_TYPE, %edx
+ jmp .L_update_stream_state
+
+ .L_invalid_literal_length_code:
+ /* else {
+ * strm->msg = "invalid literal/length code";
+ * state->mode = BAD;
+ * }
+ */
+ movl $.L_invalid_literal_length_code_msg, %ecx
+ movl $INFLATE_MODE_BAD, %edx
+ jmp .L_update_stream_state
+
+ .L_invalid_distance_too_far:
+ /* strm->msg = "invalid distance too far back";
+ * state->mode = BAD;
+ */
+ movl in(%esp), in_r /* from_r has in's reg, put in back */
+ movl $.L_invalid_distance_too_far_msg, %ecx
+ movl $INFLATE_MODE_BAD, %edx
+ jmp .L_update_stream_state
+
+ .L_update_stream_state:
+ /* set strm->msg = %ecx, strm->state->mode = %edx */
+ movl strm_sp(%esp), %eax
+ testl %ecx, %ecx /* if (msg != NULL) */
+ jz .L_skip_msg
+ movl %ecx, msg_strm(%eax) /* strm->msg = msg */
+ .L_skip_msg:
+ movl state_strm(%eax), %eax /* state = strm->state */
+ movl %edx, mode_state(%eax) /* state->mode = edx (BAD | TYPE) */
+ jmp .L_break_loop
+
+ .align 32,0x90
+ .L_break_loop:
+
+ /*
+ * Regs:
+ *
+ * bits = %ebp when mmx, and in %ebx when non-mmx
+ * hold = %hold_mm when mmx, and in %ebp when non-mmx
+ * in = %esi
+ * out = %edi
+ */
+
+ #if defined( USE_MMX ) || defined( RUN_TIME_MMX )
+
+ #if defined( RUN_TIME_MMX )
+
+ cmpl $DO_USE_MMX, inflate_fast_use_mmx
+ jne .L_update_next_in
+
+ #endif /* RUN_TIME_MMX */
+
+ movl %ebp, %ebx
+
+ .L_update_next_in:
+
+ #endif
+
+ #define strm_r %eax
+ #define state_r %edx
+
+ /* len = bits >> 3;
+ * in -= len;
+ * bits -= len << 3;
+ * hold &= (1U << bits) - 1;
+ * state->hold = hold;
+ * state->bits = bits;
+ * strm->next_in = in;
+ * strm->next_out = out;
+ */
+ movl strm_sp(%esp), strm_r
+ movl %ebx, %ecx
+ movl state_strm(strm_r), state_r
+ shrl $3, %ecx
+ subl %ecx, in_r
+ shll $3, %ecx
+ subl %ecx, %ebx
+ movl out_r, next_out_strm(strm_r)
+ movl %ebx, bits_state(state_r)
+ movl %ebx, %ecx
+
+ leal buf(%esp), %ebx
+ cmpl %ebx, last(%esp)
+ jne .L_buf_not_used /* if buf != last */
+
+ subl %ebx, in_r /* in -= buf */
+ movl next_in_strm(strm_r), %ebx
+ movl %ebx, last(%esp) /* last = strm->next_in */
+ addl %ebx, in_r /* in += strm->next_in */
+ movl avail_in_strm(strm_r), %ebx
+ subl $11, %ebx
+ addl %ebx, last(%esp) /* last = &strm->next_in[ avail_in - 11 ] */
+
+ .L_buf_not_used:
+ movl in_r, next_in_strm(strm_r)
+
+ movl $1, %ebx
+ shll %cl, %ebx
+ decl %ebx
+
+ #if defined( USE_MMX ) || defined( RUN_TIME_MMX )
+
+ #if defined( RUN_TIME_MMX )
+
+ cmpl $DO_USE_MMX, inflate_fast_use_mmx
+ jne .L_update_hold
+
+ #endif /* RUN_TIME_MMX */
+
+ psrlq used_mm, hold_mm /* hold_mm >>= last bit length */
+ movd hold_mm, %ebp
+
+ emms
+
+ .L_update_hold:
+
+ #endif /* USE_MMX || RUN_TIME_MMX */
+
+ andl %ebx, %ebp
+ movl %ebp, hold_state(state_r)
+
+ #define last_r %ebx
+
+ /* strm->avail_in = in < last ? 11 + (last - in) : 11 - (in - last) */
+ movl last(%esp), last_r
+ cmpl in_r, last_r
+ jbe .L_last_is_smaller /* if (in >= last) */
+
+ subl in_r, last_r /* last -= in */
+ addl $11, last_r /* last += 11 */
+ movl last_r, avail_in_strm(strm_r)
+ jmp .L_fixup_out
+ .L_last_is_smaller:
+ subl last_r, in_r /* in -= last */
+ negl in_r /* in = -in */
+ addl $11, in_r /* in += 11 */
+ movl in_r, avail_in_strm(strm_r)
+
+ #undef last_r
+ #define end_r %ebx
+
+ .L_fixup_out:
+ /* strm->avail_out = out < end ? 257 + (end - out) : 257 - (out - end)*/
+ movl end(%esp), end_r
+ cmpl out_r, end_r
+ jbe .L_end_is_smaller /* if (out >= end) */
+
+ subl out_r, end_r /* end -= out */
+ addl $257, end_r /* end += 257 */
+ movl end_r, avail_out_strm(strm_r)
+ jmp .L_done
+ .L_end_is_smaller:
+ subl end_r, out_r /* out -= end */
+ negl out_r /* out = -out */
+ addl $257, out_r /* out += 257 */
+ movl out_r, avail_out_strm(strm_r)
+
+ #undef end_r
+ #undef strm_r
+ #undef state_r
+
+ .L_done:
+ addl $local_var_size, %esp
+ popf
+ popl %ebx
+ popl %ebp
+ popl %esi
+ popl %edi
+ ret
+
+ #if defined( GAS_ELF )
+ /* elf info */
+ .type inflate_fast,@function
+ .size inflate_fast,.-inflate_fast
+ #endif