/* code_match() -- emit the compressed encoding for one match.
 *
 * FRAGMENT: this excerpt ends after the m_len == 2 case; the remainder of
 * the function (longer match classes) lies outside this chunk.
 *
 *  c      compressor state; match statistics are updated here
 *  op     current output position; advanced as code bytes are emitted
 *  m_len  match length (this excerpt only handles m_len == 2)
 *  m_off  match offset, 1-based on entry (decremented before coding)
 */
static lzo_bytep
code_match ( LZO_COMPRESS_T *c, lzo_bytep op, lzo_uint m_len, lzo_uint m_off )
{
    /* keep the unmodified length/offset; presumably used later in the
     * function body beyond this excerpt -- TODO confirm */
    lzo_uint x_len = m_len;
    lzo_uint x_off = m_off;

    c->match_bytes += (unsigned long) m_len;    /* statistics */

#if 0
/* disabled experiment: track repeated offsets to gauge the benefit of a
 * "repeat last offset" code -- kept for reference
static lzo_uint last_m_len = 0, last_m_off = 0;
static lzo_uint prev_m_off[4];
static unsigned prev_m_off_ptr = 0;
unsigned i;

//if (m_len >= 3 && m_len <= M2_MAX_LEN && m_off <= M2_MAX_OFFSET)
if (m_len >= 3 && m_len <= M2_MAX_LEN)
{
    //if (m_len == last_m_len && m_off == last_m_off)
        //printf("last_m_len + last_m_off\n");
    //else
    if (m_off == last_m_off)
        printf("last_m_off\n");
    else
    {
        for (i = 0; i < 4; i++)
            if (m_off == prev_m_off[i])
                printf("prev_m_off %u: %5ld\n",i,(long)m_off);
    }
}
last_m_len = m_len;
last_m_off = prev_m_off[prev_m_off_ptr] = m_off;
prev_m_off_ptr = (prev_m_off_ptr + 1) & 3;
*/
#endif

    assert(op > c->out);
    if (m_len == 2)
    {
        /* M1 match: length-2 match, only legal right after a short
         * (1..3 byte) literal run -- the r1_lit asserts enforce that */
        assert(m_off <= M1_MAX_OFFSET);
        assert(c->r1_lit > 0); assert(c->r1_lit < 4);
        m_off -= 1;
#if defined(LZO1Z)
        /* LZO1Z bit layout: high offset bits first, then low bits */
        *op++ = LZO_BYTE(M1_MARKER | (m_off >> 6));
        *op++ = LZO_BYTE(m_off << 2);
#else
        /* default layout: 2 low offset bits in the marker byte */
        *op++ = LZO_BYTE(M1_MARKER | ((m_off & 3) << 2));
        *op++ = LZO_BYTE(m_off >> 2);
#endif
        c->m1a_m++;     /* statistics */
    }
    /* NOTE(review): excerpt ends here; remaining match classes are coded
     * in the portion of this function outside this chunk. */
/* store_run() -- emit a run of r_len literal bytes from ii to op.
 *
 * Long runs are coded as "R0" runs: a 0 marker byte followed by a length
 * code, then the raw bytes.  Power-of-two sized super-runs (512..32768
 * bytes) get their own codes (R0FAST - R0MIN + r_bits) so very long runs
 * stay compact.
 *
 * NOTE(review): MEMCPY_DS / MEMCPY8_DS are project macros that copy and
 * advance both op and ii, and appear to consume their length argument --
 * the final assert(r_len == 0) relies on that.  TODO confirm against the
 * macro definitions.
 *
 * Returns the advanced output pointer.
 */
static
#if LZO_ARCH_AVR
__lzo_noinline
#endif
lzo_bytep
store_run(lzo_bytep op, const lzo_bytep ii, lzo_uint r_len)
{
    assert(r_len > 0);

    /* code a long R0 run */
    if (r_len >= 512)
    {
        unsigned r_bits = 7;            /* 256 << 7 == 32768 */
        /* peel off the largest power-of-two chunks first, halving the
         * chunk size each iteration down to 512 bytes */
        do {
            while (r_len >= (256u << r_bits))
            {
                r_len -= (256u << r_bits);
                *op++ = 0; *op++ = LZO_BYTE((R0FAST - R0MIN) + r_bits);
                MEMCPY8_DS(op, ii, (256u << r_bits));
            }
        } while (--r_bits > 0);
    }
    /* code fixed-size R0FAST runs */
    while (r_len >= R0FAST)
    {
        r_len -= R0FAST;
        *op++ = 0; *op++ = R0FAST - R0MIN;
        MEMCPY8_DS(op, ii, R0FAST);
    }

    if (r_len >= R0MIN)
    {
        /* code a short R0 run */
        *op++ = 0; *op++ = LZO_BYTE(r_len - R0MIN);
        MEMCPY_DS(op, ii, r_len);
    }
    else if (r_len > 0)
    {
        /* code a 'normal' run: length byte followed by the literals */
        *op++ = LZO_BYTE(r_len);
        MEMCPY_DS(op, ii, r_len);
    }

    assert(r_len == 0);     /* MEMCPY_DS must have consumed the length */
    return op;
}
/* code_match() -- emit the compressed encoding for one match (M2 class).
 *
 * FRAGMENT: this excerpt covers only the short-match (M2) case; the rest
 * of the function lies outside this chunk.
 *
 * M2 layout: first byte holds (m_len - 2) in the top 3 bits plus the 3
 * low offset bits; second byte holds the remaining offset bits.
 */
static lzo_bytep
code_match ( LZO_COMPRESS_T *c, lzo_bytep op, lzo_uint m_len, lzo_uint m_off )
{
    if (m_len <= M2_MAX_LEN && m_off <= M2_MAX_OFFSET)
    {
        m_off -= 1;     /* offsets are coded 0-based */
        *op++ = LZO_BYTE(((m_len - 2) << 5) | ((m_off & 7) << 2));
        *op++ = LZO_BYTE(m_off >> 3);
        c->m2_m++;      /* statistics */
    }
    /* NOTE(review): excerpt ends here; longer match classes are coded in
     * the portion of this function outside this chunk. */
/* swd_getbyte() -- pull one byte from the input source into the sliding
 * window ring buffer and advance the window positions.
 *
 * On end of input (getbyte < 0) no byte is stored and the lookahead
 * counter is shrunk instead.  A stored byte that lands in the first f
 * positions is mirrored into b_wrap so wrap-around string compares can
 * read past the buffer end without an explicit modulo.
 */
static __inline__ void
swd_getbyte (lzo1x_999_swd_t * s)
{
    const int ch = getbyte (*(s->c));

    if (ch < 0)
    {
        /* input exhausted: consume lookahead instead of storing a byte */
        if (s->look > 0)
            --s->look;
    }
    else
    {
        s->b[s->ip] = LZO_BYTE (ch);
        if (s->ip < s->f)
            s->b_wrap[s->ip] = LZO_BYTE (ch);   /* mirror for wrap-around */
    }

    /* advance the three ring positions, wrapping at the buffer size */
    if (++s->ip == s->b_size)
        s->ip = 0;
    if (++s->bp == s->b_size)
        s->bp = 0;
    if (++s->rp == s->b_size)
        s->rp = 0;
}
/* code_match() -- emit the compressed encoding for one match (M2 class,
 * parameterized variant using the M2O_* layout macros).
 *
 * FRAGMENT: this excerpt covers only the M2 case; the rest of the
 * function lies outside this chunk.
 */
static lzo_byte *
code_match ( LZO_COMPRESS_T *c, lzo_byte *op, lzo_uint m_len, lzo_uint m_off )
{
    if (m_len <= M2_MAX_LEN && m_off <= M2_MAX_OFFSET)
    {
        assert(m_len >= M2_MIN_LEN);
        assert(m_off >= M2_MIN_OFFSET);

        m_off -= M2_MIN_OFFSET;     /* rebase offset for coding */
        /* code match len + low offset bits */
        *op++ = LZO_BYTE(((m_len - (M2_MIN_LEN - 2)) << M2O_BITS) |
                         (m_off & M2O_MASK));
        /* code high offset bits */
        *op++ = LZO_BYTE(m_off >> M2O_BITS);
        c->m2_m++;      /* statistics */
    }
    /* NOTE(review): excerpt ends here; longer match classes are coded in
     * the portion of this function outside this chunk. */
/* code_match() -- emit the compressed encoding for one match
 * (lzo1x_999 variant).
 *
 * FRAGMENT: this excerpt ends after the m_len == 2 (M1) case; the rest
 * of the function lies outside this chunk.
 */
static lzo_byte *
code_match (lzo1x_999_t * c, lzo_byte * op, lzo_uint m_len, lzo_uint m_off)
{
    /* keep the unmodified length/offset; presumably used later in the
     * function body beyond this excerpt -- TODO confirm */
    lzo_uint x_len = m_len;
    lzo_uint x_off = m_off;

    c->match_bytes += m_len;    /* statistics */

    if (m_len == 2)
    {
        /* M1 match: marker byte carries the 2 low offset bits */
        m_off -= 1;
        *op++ = LZO_BYTE (M1_MARKER | ((m_off & 3) << 2));
        *op++ = LZO_BYTE (m_off >> 2);
        c->m1a_m++;     /* statistics */
    }
    /* NOTE(review): excerpt ends here; remaining match classes are coded
     * in the portion of this function outside this chunk. */
/* do_compress() -- core greedy compressor loop (LZO1F-style coding:
 * M2 first byte is (m_len - 2) << 5 | low offset bits).
 *
 * FRAGMENT: this excerpt ends inside the match-coding branch; the
 * remainder of the function lies outside this chunk.
 *
 * Scans the input, hashing 3-byte strings into dict to find previous
 * occurrences; literals between matches are flushed as literal runs.
 */
static __lzo_noinline int
do_compress ( const lzo_bytep in , lzo_uint  in_len,
                    lzo_bytep out, lzo_uintp out_len,
                    lzo_voidp wrkmem )
{
    const lzo_bytep ip;
    lzo_bytep op;
    const lzo_bytep const in_end = in + in_len;
    const lzo_bytep const ip_end = in + in_len - 9;     /* stop margin */
    const lzo_bytep ii;                 /* start of current literal run */
    lzo_dict_p const dict = (lzo_dict_p) wrkmem;

    op = out;
    ip = in;
    ii = ip;

    ip++;
    for (;;)
    {
        const lzo_bytep m_pos;
        LZO_DEFINE_UNINITIALIZED_VAR(lzo_uint, m_off, 0);
        lzo_uint m_len;
        lzo_uint dindex;
        lzo_uint lit;

        /* probe the dictionary with two hash functions before giving up */
        DINDEX1(dindex,ip);
        GINDEX(m_pos,m_off,dict,dindex,in);
        if (LZO_CHECK_MPOS_NON_DET(m_pos,m_off,in,ip,M3_MAX_OFFSET))
            goto literal;
#if 1
        if (m_off <= M2_MAX_OFFSET || m_pos[3] == ip[3])
            goto try_match;
        DINDEX2(dindex,ip);
#endif
        GINDEX(m_pos,m_off,dict,dindex,in);
        if (LZO_CHECK_MPOS_NON_DET(m_pos,m_off,in,ip,M3_MAX_OFFSET))
            goto literal;
        if (m_off <= M2_MAX_OFFSET || m_pos[3] == ip[3])
            goto try_match;
        goto literal;

try_match:
#if 0 && (LZO_OPT_UNALIGNED16)
        if (UA_GET_NE16(m_pos) != UA_GET_NE16(ip))
#else
        if (m_pos[0] != ip[0] || m_pos[1] != ip[1])
#endif
        {
            /* first two bytes differ: not a match */
        }
        else
        {
            if (m_pos[2] == ip[2])
            {
                m_pos += 3;     /* 3 bytes confirmed */
#if 0
                /* disabled heuristics kept for reference */
                if (m_off <= M2_MAX_OFFSET)
                    goto match;
                if (lit <= 3)
                    goto match;
                if (lit == 3)           /* better compression, but slower */
                {
                    assert(op - 2 > out); op[-2] |= LZO_BYTE(3);
                    *op++ = *ii++; *op++ = *ii++; *op++ = *ii++;
                    goto code_match;
                }
                if (*m_pos == ip[3])
#endif
                    goto match;
            }
        }

        /* a literal */
literal:
        UPDATE_I(dict,0,dindex,ip,in);
        if (++ip >= ip_end)
            break;
        continue;

        /* a match */
match:
        UPDATE_I(dict,0,dindex,ip,in);
        /* store current literal run */
        lit = pd(ip,ii);
        if (lit > 0)
        {
            lzo_uint t = lit;

            if (t < 4 && op > out)
                /* short run: fold count into previous op code byte */
                op[-2] = LZO_BYTE(op[-2] | t);
            else if (t <= 31)
                *op++ = LZO_BYTE(t);
            else
            {
                /* long run: 0 marker, then length in 255-byte chunks */
                lzo_uint tt = t - 31;

                *op++ = 0;
                while (tt > 255)
                {
                    tt -= 255;
                    UA_SET1(op, 0);
                    op++;
                }
                assert(tt > 0);
                *op++ = LZO_BYTE(tt);
            }
            do *op++ = *ii++; while (--t > 0);  /* copy the literals */
        }
        assert(ii == ip);

        /* code the match: 3 bytes matched already, try up to 6 more */
        ip += 3;
        if (*m_pos++ != *ip++ || *m_pos++ != *ip++ || *m_pos++ != *ip++ ||
            *m_pos++ != *ip++ || *m_pos++ != *ip++ || *m_pos++ != *ip++)
        {
            --ip;       /* ran one past the mismatch */
            m_len = pd(ip, ii);
            assert(m_len >= 3); assert(m_len <= 8);

            if (m_off <= M2_MAX_OFFSET)
            {
                /* M2: (len - 2) in top 3 bits + 3 low offset bits */
                m_off -= 1;
                *op++ = LZO_BYTE(((m_len - 2) << 5) | ((m_off & 7) << 2));
                *op++ = LZO_BYTE(m_off >> 3);
            }
            else if (m_len == 3 && m_off <= 2*M2_MAX_OFFSET && lit > 0)
            /* NOTE(review): excerpt ends here; remaining match coding is
             * in the portion of this function outside this chunk. */
/* lzo2a_999_compress_callback() -- best-compression (level 999) LZO2A
 * compressor with lazy matching and a progress callback.
 *
 * FRAGMENT: the return type precedes this excerpt and the function body
 * continues beyond it (long-match coding and tail handling are cut off).
 *
 * Output is bit-oriented: putbit()/putbits()/putbyte() presumably pack
 * into the bit buffer b (k = bits pending, bitp = position of the byte
 * holding pending flag bits) -- TODO confirm against macro definitions.
 */
lzo2a_999_compress_callback(const lzo_bytep in , lzo_uint  in_len,
                                  lzo_bytep out, lzo_uintp out_len,
                                  lzo_voidp wrkmem,
                                  lzo_callback_p cb,
                                  lzo_uint max_chain)
{
    lzo_bytep op;
    lzo_bytep bitp = 0;
    lzo_uint m_len, m_off;
    LZO_COMPRESS_T cc;
    LZO_COMPRESS_T* const c = &cc;
    lzo_swd_p const swd = (lzo_swd_p) wrkmem;
    int r;

    lzo_uint32_t b = 0;         /* bit buffer */
    unsigned k = 0;             /* bits in bit buffer */

    /* sanity check */
    LZO_COMPILE_TIME_ASSERT(LZO2A_999_MEM_COMPRESS >= SIZEOF_LZO_SWD_T)

    c->init = 0;
    c->ip = c->in = in;
    c->in_end = in + in_len;
    c->cb = cb;
    c->m1 = c->m2 = c->m3 = c->m4 = 0;      /* per-class match counters */

    op = out;

    r = init_match(c, swd, NULL, 0, 0);
    if (r != 0)
        return r;
    if (max_chain > 0)
        swd->max_chain = max_chain;

    r = find_match(c, swd, 0, 0);
    if (r != 0)
        return r;
    while (c->look > 0)
    {
        /* lazy_match_min_gain: how much longer the next position's match
         * must be to prefer it over coding the current one */
        lzo_uint lazy_match_min_gain = 0;
#if (SWD_N >= 8192)
        lzo_uint extra1 = 0;
#endif
        lzo_uint extra2 = 0;
        lzo_uint ahead = 0;

        m_len = c->m_len;
        m_off = c->m_off;

#if (SWD_N >= 8192)
        if (m_off >= 8192)
        {
            /* far matches (M4 class) need at least M3_MIN_LEN */
            if (m_len < M3_MIN_LEN)
                m_len = 0;
            else
                lazy_match_min_gain = 1;
        }
        else
#endif
        if (m_len >= M1_MIN_LEN && m_len <= M1_MAX_LEN && m_off <= 256)
        {
            /* cheap M1 match: demand more gain before deferring */
            lazy_match_min_gain = 2;
#if (SWD_N >= 8192)
            extra1 = 3;
#endif
            extra2 = 2;
        }
        else if (m_len >= 10)
            lazy_match_min_gain = 1;
        else if (m_len >= 3)
        {
            lazy_match_min_gain = 1;
#if (SWD_N >= 8192)
            extra1 = 1;
#endif
        }
        else
            m_len = 0;      /* too short: treat as literal */

        /* try a lazy match */
        if (lazy_match_min_gain > 0 && c->look > m_len)
        {
            unsigned char lit = LZO_BYTE(swd->b_char);

            r = find_match(c, swd, 1, 0);
            assert(r == 0); LZO_UNUSED(r);
            assert(c->look > 0);

            /* adjust the required gain by how the match classes compare */
#if (SWD_N >= 8192)
            if (m_off < 8192 && c->m_off >= 8192)
                lazy_match_min_gain += extra1;
            else
#endif
            if (m_len >= M1_MIN_LEN && m_len <= M1_MAX_LEN && m_off <= 256)
            {
                if (!(c->m_len >= M1_MIN_LEN &&
                      c->m_len <= M1_MAX_LEN && c->m_off <= 256))
                    lazy_match_min_gain += extra2;
            }
            if (c->m_len >= M1_MIN_LEN &&
                c->m_len <= M1_MAX_LEN && c->m_off <= 256)
            {
                lazy_match_min_gain -= 1;
            }

            if ((lzo_int) lazy_match_min_gain < 1)
                lazy_match_min_gain = 1;

            if (c->m_len >= m_len + lazy_match_min_gain)
            {
                /* defer: emit one literal, code next position's match */
                c->lazy++;
#if !defined(NDEBUG)
                m_len = c->m_len;
                m_off = c->m_off;
                assert(lzo_memcmp(c->ip - c->look,
                                  c->ip - c->look - m_off, m_len) == 0);
                assert(m_len >= 3 || (m_len >= 2 && m_off <= 256));
#endif
                /* code literal */
                putbit(0);
                putbyte(lit);
                c->lit_bytes++;
                continue;
            }
            else
                ahead = 1;  /* lookahead already advanced one position */
            assert(m_len > 0);
        }

        if (m_len == 0)
        {
            /* a literal */
            putbit(0);
            putbyte(swd->b_char);
            c->lit_bytes++;
            r = find_match(c, swd, 1, 0);
            assert(r == 0); LZO_UNUSED(r);
        }
        else
        {
            assert(m_len >= M1_MIN_LEN);
            assert(m_off > 0);
            assert(m_off <= SWD_N);

            /* 2 - code match */
            if (m_len >= M1_MIN_LEN && m_len <= M1_MAX_LEN && m_off <= 256)
            {
                /* M1: flag bits 10, 2-bit length, 1-byte offset */
                putbit(1); putbit(0);
                putbits(2, m_len - M1_MIN_LEN);
                putbyte(m_off - 1);
                c->m1++;
            }
#if (SWD_N >= 8192)
            else if (m_off >= 8192)
            {
                /* M4: far match, offset split 5/8, open-ended length */
                unsigned len = m_len;
                assert(m_len >= M3_MIN_LEN);
                putbit(1); putbit(1);
                putbyte(m_off & 31);
                putbyte(m_off >> 5);
                putbit(1);
                len -= M3_MIN_LEN - 1;
                while (len > 255)
                {
                    len -= 255;
                    putbyte(0);
                }
                putbyte(len);
                c->m4++;
            }
#endif
            else
            {
                assert(m_len >= 3);
                putbit(1); putbit(1);
                if (m_len <= 9)
                {
                    /* M2: length packed with the 5 low offset bits */
                    putbyte(((m_len - 2) << 5) | (m_off & 31));
                    putbyte(m_off >> 5);
                    c->m2++;
                }
                else
                {
                /* NOTE(review): excerpt ends here (M3 coding and the
                 * remainder of the function are outside this chunk). */
/* do_compress() -- core compressor loop (LZO1B/LZO1C family) using an
 * incremental rolling hash (DVAL_*) and a compile-time compression level
 * (CLEVEL) that controls how densely match phrases are inserted into the
 * dictionary.
 *
 * FRAGMENT: the excerpt ends right after the inner match block closes;
 * the do-while loop condition and function tail are outside this chunk.
 */
static int
do_compress ( const lzo_bytep in , lzo_uint  in_len,
                    lzo_bytep out, lzo_uintp out_len,
                    lzo_voidp wrkmem )
{
    const lzo_bytep ip;
#if defined(__LZO_HASH_INCREMENTAL)
    lzo_xint dv;                /* rolling hash value */
#endif
    lzo_bytep op;
    const lzo_bytep m_pos;
    const lzo_bytep const ip_end = in+in_len - DVAL_LEN - MIN_MATCH_LONG;
    const lzo_bytep const in_end = in+in_len - DVAL_LEN;
    const lzo_bytep ii;
    lzo_dict_p const dict = (lzo_dict_p) wrkmem;
#if !defined(NDEBUG)
    const lzo_bytep m_pos_sav;  /* debug copy of the raw match position */
#endif

    op = out;
    ip = in;
    ii = ip;            /* point to start of literal run */
    if (in_len <= MIN_MATCH_LONG + DVAL_LEN + 1)
        goto the_end;   /* too short to find any match */

    /* init dictionary */
#if (LZO_DETERMINISTIC)
    BZERO8_PTR(wrkmem,sizeof(lzo_dict_t),D_SIZE);
#endif

    DVAL_FIRST(dv,ip);
    UPDATE_D(dict,0,dv,ip,in);
    ip++;
    DVAL_NEXT(dv,ip);

    do {
        LZO_DEFINE_UNINITIALIZED_VAR(lzo_uint, m_off, 0);
        lzo_uint dindex;

        /* probe the dictionary with two hash functions */
        DINDEX1(dindex,ip);
        GINDEX(m_pos,m_off,dict,dindex,in);
        if (LZO_CHECK_MPOS(m_pos,m_off,in,ip,MAX_OFFSET))
            goto literal;
        if (m_pos[0] == ip[0] && m_pos[1] == ip[1] && m_pos[2] == ip[2])
            goto match;
        DINDEX2(dindex,ip);
        GINDEX(m_pos,m_off,dict,dindex,in);
        if (LZO_CHECK_MPOS(m_pos,m_off,in,ip,MAX_OFFSET))
            goto literal;
        if (m_pos[0] == ip[0] && m_pos[1] == ip[1] && m_pos[2] == ip[2])
            goto match;
        goto literal;

literal:
        UPDATE_I(dict,0,dindex,ip,in);
        if (++ip >= ip_end)
            break;
        continue;

match:
        UPDATE_I(dict,0,dindex,ip,in);
#if !defined(NDEBUG) && (LZO_DICT_USE_PTR)
        m_pos_sav = m_pos;
#endif
        m_pos += 3;
        {
        /* we have found a match (of at least length 3) */
#if !defined(NDEBUG) && !(LZO_DICT_USE_PTR)
            assert((m_pos_sav = ip - m_off) == (m_pos - 3));
#endif
            /* 1) store the current literal run */
            if (pd(ip,ii) > 0)
            {
                lzo_uint t = pd(ip,ii);
#if 1
                /* OPTIMIZED: inline the copying of a short run */
                if (t < R0MIN)
                {
                    *op++ = LZO_BYTE(t);
                    MEMCPY_DS(op, ii, t);
                }
                else
#endif
                    op = store_run(op,ii,t);
            }

            /* 2a) compute match len */
            ii = ip;            /* point to start of current match */

            /* we already matched MIN_MATCH bytes,
             * m_pos also already advanced MIN_MATCH bytes */
            ip += MIN_MATCH;
            assert(m_pos < ip);

            /* try to match another MIN_MATCH_LONG - MIN_MATCH bytes
             * to see if we get a long match */

#define PS  *m_pos++ != *ip++

#if (MIN_MATCH_LONG - MIN_MATCH == 2)                   /* MBITS == 2 */
            if (PS || PS)
#elif (MIN_MATCH_LONG - MIN_MATCH == 6)                 /* MBITS == 3 */
            if (PS || PS || PS || PS || PS || PS)
#elif (MIN_MATCH_LONG - MIN_MATCH == 14)                /* MBITS == 4 */
            if (PS || PS || PS || PS || PS || PS || PS ||
                PS || PS || PS || PS || PS || PS || PS)
#elif (MIN_MATCH_LONG - MIN_MATCH == 30)                /* MBITS == 5 */
            if (PS || PS || PS || PS || PS || PS || PS || PS ||
                PS || PS || PS || PS || PS || PS || PS || PS ||
                PS || PS || PS || PS || PS || PS || PS || PS ||
                PS || PS || PS || PS || PS || PS)
#else
#  error "MBITS not yet implemented"
#endif
            {
                lzo_uint m_len;

                /* 2b) code a short match */
                assert(pd(ip,m_pos) == m_off);
                --ip;   /* ran one too far, point back to non-match */
                m_len = pd(ip, ii);
                assert(m_len >= MIN_MATCH_SHORT);
                assert(m_len <= MAX_MATCH_SHORT);
                assert(m_off > 0);
                assert(m_off <= MAX_OFFSET);
                assert(ii-m_off == m_pos_sav);
                assert(lzo_memcmp(m_pos_sav,ii,m_len) == 0);

                --m_off;
                /* code short match len + low offset bits */
                *op++ = LZO_BYTE(((m_len - THRESHOLD) << OBITS) |
                                 (m_off & OMASK));
                /* code high offset bits */
                *op++ = LZO_BYTE(m_off >> OBITS);

                /* 2c) Insert phrases (beginning with ii+1) into the
                 *     dictionary. */

#define SI      /* nothing */
#define DI      ++ii; DVAL_NEXT(dv,ii); UPDATE_D(dict,0,dv,ii,in);
#define XI      assert(ii < ip); ii = ip; DVAL_FIRST(dv,(ip));

#if (CLEVEL == 9) || (CLEVEL >= 7 && MBITS <= 4) || (CLEVEL >= 5 && MBITS <= 3)
                /* Insert the whole match (ii+1)..(ip-1) into dictionary. */
                ++ii;
                do {
                    DVAL_NEXT(dv,ii);
                    UPDATE_D(dict,0,dv,ii,in);
                } while (++ii < ip);
                DVAL_NEXT(dv,ii);
                assert(ii == ip);
                DVAL_ASSERT(dv,ip);
#elif (CLEVEL >= 3)
                SI   DI DI   XI
#elif (CLEVEL >= 2)
                SI   DI      XI
#else
                             XI
#endif
            }
            else
            {
                /* we've found a long match - see how far we can still go */
                const lzo_bytep end;
                lzo_uint m_len;

                assert(ip <= in_end);
                assert(ii == ip - MIN_MATCH_LONG);

                if (pd(in_end,ip) <= (MAX_MATCH_LONG - MIN_MATCH_LONG))
                    end = in_end;
                else
                {
                    end = ip + (MAX_MATCH_LONG - MIN_MATCH_LONG);
                    assert(end < in_end);
                }

                while (ip < end && *m_pos == *ip)
                    m_pos++, ip++;
                assert(ip <= in_end);

                /* 2b) code the long match */
                m_len = pd(ip, ii);
                assert(m_len >= MIN_MATCH_LONG);
                assert(m_len <= MAX_MATCH_LONG);
                assert(m_off > 0);
                assert(m_off <= MAX_OFFSET);
                assert(ii-m_off == m_pos_sav);
                assert(lzo_memcmp(m_pos_sav,ii,m_len) == 0);
                assert(pd(ip,m_pos) == m_off);

                --m_off;
                /* code long match flag + low offset bits */
                *op++ = LZO_BYTE(((MSIZE - 1) << OBITS) | (m_off & OMASK));
                /* code high offset bits */
                *op++ = LZO_BYTE(m_off >> OBITS);
                /* code match len */
                *op++ = LZO_BYTE(m_len - MIN_MATCH_LONG);

                /* 2c) Insert phrases (beginning with ii+1) into the
                 *     dictionary. */
#if (CLEVEL == 9)
                /* Insert the whole match (ii+1)..(ip-1) into dictionary. */
                /* This is not recommended because it is slow. */
                ++ii;
                do {
                    DVAL_NEXT(dv,ii);
                    UPDATE_D(dict,0,dv,ii,in);
                } while (++ii < ip);
                DVAL_NEXT(dv,ii);
                assert(ii == ip);
                DVAL_ASSERT(dv,ip);
#elif (CLEVEL >= 8)
                SI   DI DI DI DI DI DI DI DI   XI
#elif (CLEVEL >= 7)
                SI   DI DI DI DI DI DI DI   XI
#elif (CLEVEL >= 6)
                SI   DI DI DI DI DI DI   XI
#elif (CLEVEL >= 5)
                SI   DI DI DI DI   XI
#elif (CLEVEL >= 4)
                SI   DI DI DI   XI
#elif (CLEVEL >= 3)
                SI   DI DI   XI
#elif (CLEVEL >= 2)
                SI   DI   XI
#else
                     XI
#endif
            }

            /* ii now points to the start of next literal run */
            assert(ii == ip);
        }
        /* NOTE(review): excerpt ends here; the loop condition and the
         * function tail (the_end:) are outside this chunk. */
/* do_compress() -- legacy (LZO 1.x era) compressor loop using the DVAL
 * rolling hash and hand-placed register hints for i386.
 *
 * FRAGMENT: this excerpt ends inside the match-coding branch; the
 * remainder of the function lies outside this chunk.
 */
static int
do_compress ( const lzo_byte *in , lzo_uint  in_len,
                    lzo_byte *out, lzo_uint *out_len,
                    lzo_voidp wrkmem )
{
#if 1 && defined(__GNUC__) && defined(__i386__)
    register const lzo_byte *ip __asm__("%esi");
#else
    register const lzo_byte *ip;
#endif
    lzo_uint32 dv;              /* rolling hash value */
    lzo_byte *op;
    const lzo_byte * const in_end = in + in_len;
    const lzo_byte * const ip_end = in + in_len - 9;    /* stop margin */
    const lzo_byte *ii;         /* start of current literal run */
    const lzo_bytepp const dict = (const lzo_bytepp) wrkmem;

    op = out;
    ip = in;
    ii = ip;

    DVAL_FIRST(dv,ip);
    UPDATE_D(dict,cycle,dv,ip);
    ip++;
    DVAL_NEXT(dv,ip);

    while (1)
    {
#if 1 && defined(__GNUC__) && defined(__i386__)
        register const lzo_byte *m_pos __asm__("%edi");
#else
        register const lzo_byte *m_pos;
#endif
        lzo_uint m_len;
        lzo_ptrdiff_t m_off;
        lzo_uint lit;

        {
            lzo_uint dindex = DINDEX(dv,ip);
            m_pos = dict[dindex];
            UPDATE_I(dict,cycle,dindex,ip);
        }

        if (LZO_CHECK_MPOS_NON_DET(m_pos,m_off,in,ip,M3_MAX_OFFSET))
        {
            /* candidate out of range: fall through to literal */
        }
#if defined(LZO_UNALIGNED_OK_2)
        else if (* (lzo_ushortp) m_pos != * (lzo_ushortp) ip)
#else
        else if (m_pos[0] != ip[0] || m_pos[1] != ip[1])
#endif
        {
            /* first two bytes differ: fall through to literal */
        }
        else
        {
            if (m_pos[2] == ip[2])
            {
                m_pos += 3;     /* 3 bytes confirmed */
                if (m_off <= M2_MAX_OFFSET)
                    goto match;
#if 1
                if (ip - ii <= 3)
                    goto match;
#else
                if (ip - ii == 3)       /* better compression, but slower */
                    goto match;
#endif
                if (*m_pos == ip[3])
                    goto match;
            }
        }

        /* a literal */
        ++ip;
        if (ip >= ip_end)
            break;
        DVAL_NEXT(dv,ip);
        continue;

        /* a match */
match:
        /* store current literal run */
        lit = ip - ii;
        if (lit > 0)
        {
            register lzo_uint t = lit;

            if (t < 4 && op > out)
                /* short run: fold count into previous op code byte */
                op[-2] |= LZO_BYTE(t);
            else if (t <= 31)
                *op++ = LZO_BYTE(t);
            else
            {
                /* long run: 0 marker, then length in 255-byte chunks */
                register lzo_uint tt = t - 31;

                *op++ = 0;
                while (tt > 255)
                {
                    tt -= 255;
                    *op++ = 0;
                }
                assert(tt > 0);
                *op++ = LZO_BYTE(tt);
            }
            do *op++ = *ii++; while (--t > 0);  /* copy the literals */
        }
        assert(ii == ip);

        /* code the match: 3 bytes matched already, try up to 6 more */
        ip += 3;
        if (*m_pos++ != *ip++ || *m_pos++ != *ip++ || *m_pos++ != *ip++ ||
            *m_pos++ != *ip++ || *m_pos++ != *ip++ || *m_pos++ != *ip++)
        {
            --ip;       /* ran one past the mismatch */
            m_len = ip - ii;
            assert(m_len >= 3); assert(m_len <= 8);

            if (m_off <= M2_MAX_OFFSET)
            {
                /* M2: (len - 2) in top 3 bits + 3 low offset bits */
                m_off -= 1;
                *op++ = LZO_BYTE(((m_len - 2) << 5) | ((m_off & 7) << 2));
                *op++ = LZO_BYTE(m_off >> 3);
            }
            else if (m_len == 3 && m_off <= 2*M2_MAX_OFFSET && lit > 0)
            /* NOTE(review): excerpt ends here; remaining match coding is
             * in the portion of this function outside this chunk. */