package flate

import "fmt"

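// fastEncL6 is the level 6 encoder. Note (added doc comment): it combines a
// short-hash table with one entry per bucket and a long-hash table that keeps
// the two most recent positions per bucket (Cur and Prev).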
type fastEncL6 struct {
	fastGen
	table  [tableSize]tableEntry
	bTable [tableSize]tableEntryPrev
}

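// Encode appends tokens for src to dst. Note (added doc comment): matches are
// searched with a short (4-byte) hash and a long (7-byte) hash, with current
// and previous candidates kept per long-hash bucket.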
func (e *fastEncL6) Encode(dst *tokens, src []byte) {
	const (
		inputMargin            = 12 - 1
		minNonLiteralBlockSize = 1 + 1 + inputMargin
		hashShortBytes         = 4
	)
	if debugDeflate && e.cur < 0 {
		panic(fmt.Sprint("e.cur < 0: ", e.cur))
	}

	// Protect against e.cur wraparound.
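	// Table offsets are stored as position+e.cur; once e.cur reaches
	// bufferReset they are rebased (or cleared when already outside the
	// maxMatchOffset window) and e.cur restarts at maxMatchOffset.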
	for e.cur >= bufferReset {
		if len(e.hist) == 0 {
			for i := range e.table[:] {
				e.table[i] = tableEntry{}
			}
			for i := range e.bTable[:] {
				e.bTable[i] = tableEntryPrev{}
			}
			e.cur = maxMatchOffset
			break
		}
		// Shift down everything in the table that isn't already too far away.
		minOff := e.cur + int32(len(e.hist)) - maxMatchOffset
		for i := range e.table[:] {
			v := e.table[i].offset
			if v <= minOff {
				v = 0
			} else {
				v = v - e.cur + maxMatchOffset
			}
			e.table[i].offset = v
		}
		for i := range e.bTable[:] {
			v := e.bTable[i]
			if v.Cur.offset <= minOff {
				v.Cur.offset = 0
				v.Prev.offset = 0
			} else {
				v.Cur.offset = v.Cur.offset - e.cur + maxMatchOffset
				if v.Prev.offset <= minOff {
					v.Prev.offset = 0
				} else {
					v.Prev.offset = v.Prev.offset - e.cur + maxMatchOffset
				}
			}
			e.bTable[i] = v
		}
		e.cur = maxMatchOffset
	}

	s := e.addBlock(src)

	// This check isn't in the Snappy implementation, but there, the caller
	// instead of the callee handles this case.
	if len(src) < minNonLiteralBlockSize {
		// We do not fill the token table.
		// This will be picked up by caller.
		dst.n = uint16(len(src))
		return
	}

	// Override src
	src = e.hist
	nextEmit := s

	// sLimit is when to stop looking for offset/length copies. The inputMargin
	// lets us use a fast path for emitLiteral in the main loop, while we are
	// looking for copies.
	sLimit := int32(len(src) - inputMargin)

	// nextEmit is where in src the next emitLiteral should start from.
	cv := load6432(src, s)
	// Repeat MUST be > 1 and within range
	repeat := int32(1)
	for {
		const skipLog = 7
		const doEvery = 1
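		// doEvery is the base step of the search; skipLog makes the step grow
		// with the number of bytes since the last emit:
		// nextS = s + doEvery + (s-nextEmit)>>skipLog.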

		nextS := s
		var l int32
		var t int32
		for {
			nextHashS := hashLen(cv, tableBits, hashShortBytes)
			nextHashL := hash7(cv, tableBits)
			s = nextS
			nextS = s + doEvery + (s-nextEmit)>>skipLog
			if nextS > sLimit {
				goto emitRemainder
			}
			// Fetch a short+long candidate
			sCandidate := e.table[nextHashS]
			lCandidate := e.bTable[nextHashL]
			next := load6432(src, nextS)
			entry := tableEntry{offset: s + e.cur}
			e.table[nextHashS] = entry
			eLong := &e.bTable[nextHashL]
			eLong.Cur, eLong.Prev = entry, eLong.Cur

			// Calculate hashes of 'next'
			nextHashS = hashLen(next, tableBits, hashShortBytes)
			nextHashL = hash7(next, tableBits)

			t = lCandidate.Cur.offset - e.cur
			if s-t < maxMatchOffset {
				if uint32(cv) == load3232(src, lCandidate.Cur.offset-e.cur) {
					// Long candidate matches at least 4 bytes.

					// Store the next match
					e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
					eLong := &e.bTable[nextHashL]
					eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur

					// Check the previous long candidate as well.
					t2 := lCandidate.Prev.offset - e.cur
					if s-t2 < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) {
						l = e.matchlen(s+4, t+4, src) + 4
						ml1 := e.matchlen(s+4, t2+4, src) + 4
						if ml1 > l {
							t = t2
							l = ml1
							break
						}
					}
					break
				}
				// Current value did not match, but check if previous long value does.
				t = lCandidate.Prev.offset - e.cur
				if s-t < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.Prev.offset-e.cur) {
					// Store the next match
					e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
					eLong := &e.bTable[nextHashL]
					eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur
					break
				}
			}

			t = sCandidate.offset - e.cur
			if s-t < maxMatchOffset && uint32(cv) == load3232(src, sCandidate.offset-e.cur) {
				// Found a 4 match...
				l = e.matchlen(s+4, t+4, src) + 4

				// Look up next long candidate (at nextS)
				lCandidate = e.bTable[nextHashL]

				// Store the next match
				e.table[nextHashS] = tableEntry{offset: nextS + e.cur}
				eLong := &e.bTable[nextHashL]
				eLong.Cur, eLong.Prev = tableEntry{offset: nextS + e.cur}, eLong.Cur

				// Check repeat at s + repOff
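				// ('repeat' is the offset of the previously emitted match.)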
				const repOff = 1
				t2 := s - repeat + repOff
				if load3232(src, t2) == uint32(cv>>(8*repOff)) {
					ml := e.matchlen(s+4+repOff, t2+4, src) + 4
					if ml > l {
						t = t2
						l = ml
						s += repOff
						// Not worth checking more.
						break
					}
				}

				// If the next long is a candidate, use that...
				t2 = lCandidate.Cur.offset - e.cur
				if nextS-t2 < maxMatchOffset {
					if load3232(src, lCandidate.Cur.offset-e.cur) == uint32(next) {
						ml := e.matchlen(nextS+4, t2+4, src) + 4
						if ml > l {
							t = t2
							s = nextS
							l = ml
							// This is ok, but check previous as well.
						}
					}
					// If the previous long is a candidate, use that...
					t2 = lCandidate.Prev.offset - e.cur
					if nextS-t2 < maxMatchOffset && load3232(src, lCandidate.Prev.offset-e.cur) == uint32(next) {
						ml := e.matchlen(nextS+4, t2+4, src) + 4
						if ml > l {
							t = t2
							s = nextS
							l = ml
							break
						}
					}
				}
				break
			}
			cv = next
		}

		// A 4-byte match has been found. We'll later see if more than 4 bytes
		// match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
		// them as literal bytes.

		// Extend the 4-byte match as long as possible.
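		// (l == maxMatchLength means the earlier, length-capped matchlen call
		// may have stopped short, so matchlenLong continues from there.)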
		if l == 0 {
			l = e.matchlenLong(s+4, t+4, src) + 4
		} else if l == maxMatchLength {
			l += e.matchlenLong(s+l, t+l, src)
		}

		// Try to locate a better match by checking the end-of-match...
		if sAt := s + l; sAt < sLimit {
			// Allow some bytes at the beginning to mismatch.
			// Sweet spot is 2/3 bytes depending on input.
			// 3 is only a little better when it is but sometimes a lot worse.
			// The skipped bytes are tested in Extend backwards,
			// and still picked up as part of the match if they do.
			const skipBeginning = 2
			eLong := &e.bTable[hash7(load6432(src, sAt), tableBits)]
			// Test current
			t2 := eLong.Cur.offset - e.cur - l + skipBeginning
			s2 := s + skipBeginning
			off := s2 - t2
			if off < maxMatchOffset {
				if off > 0 && t2 >= 0 {
					if l2 := e.matchlenLong(s2, t2, src); l2 > l {
						t = t2
						l = l2
						s = s2
					}
				}
				// Test next:
				t2 = eLong.Prev.offset - e.cur - l + skipBeginning
				off := s2 - t2
				if off > 0 && off < maxMatchOffset && t2 >= 0 {
					if l2 := e.matchlenLong(s2, t2, src); l2 > l {
						t = t2
						l = l2
						s = s2
					}
				}
			}
		}

		// Extend backwards
		for t > 0 && s > nextEmit && src[t-1] == src[s-1] {
			s--
			t--
			l++
		}
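		// Emit everything between the previous emit point and the start of
		// the match as literal tokens.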
		if nextEmit < s {
			if false {
				emitLiteral(dst, src[nextEmit:s])
			} else {
				for _, v := range src[nextEmit:s] {
					dst.tokens[dst.n] = token(v)
					dst.litHist[v]++
					dst.n++
				}
			}
		}
		if false {
			if t >= s {
				panic(fmt.Sprintln("s-t", s, t))
			}
			if (s - t) > maxMatchOffset {
				panic(fmt.Sprintln("mmo", s-t))
			}
			if l < baseMatchLength {
				panic("bml")
			}
		}

		dst.AddMatchLong(l, uint32(s-t-baseMatchOffset))
		repeat = s - t
		s += l
		nextEmit = s
		if nextS >= s {
			s = nextS + 1
		}

		if s >= sLimit {
			// Index after match end.
			for i := nextS + 1; i < int32(len(src))-8; i += 2 {
				cv := load6432(src, i)
				e.table[hashLen(cv, tableBits, hashShortBytes)] = tableEntry{offset: i + e.cur}
				eLong := &e.bTable[hash7(cv, tableBits)]
				eLong.Cur, eLong.Prev = tableEntry{offset: i + e.cur}, eLong.Cur
			}
			goto emitRemainder
		}

		// Store every long hash in-between and every second short.
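		// (Each iteration indexes position i in both tables and position i+1
		// in the long table only.)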
		if true {
			for i := nextS + 1; i < s-1; i += 2 {
				cv := load6432(src, i)
				t := tableEntry{offset: i + e.cur}
				t2 := tableEntry{offset: t.offset + 1}
				eLong := &e.bTable[hash7(cv, tableBits)]
				eLong2 := &e.bTable[hash7(cv>>8, tableBits)]
				e.table[hashLen(cv, tableBits, hashShortBytes)] = t
				eLong.Cur, eLong.Prev = t, eLong.Cur
				eLong2.Cur, eLong2.Prev = t2, eLong2.Cur
			}
		}

		// We could immediately start working at s now, but to improve
		// compression we first update the hash table at s-1 and at s.
		cv = load6432(src, s)
	}

emitRemainder:
	if int(nextEmit) < len(src) {
		// If nothing was added, don't encode literals.
		if dst.n == 0 {
			return
		}

		emitLiteral(dst, src[nextEmit:])
	}
}