package flate

import "fmt"

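// fastEncL4 implements compression level 4. In addition to the fastGen state
// it keeps a short hash table (4-byte hashes) and a long hash bTable
// (7-byte hashes), so longer matches can be preferred when both hit.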
type fastEncL4 struct {
	fastGen
	table  [tableSize]tableEntry
	bTable [tableSize]tableEntry
}

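// Encode compresses src into dst, looking up match candidates in both the
// short and the long hash table and emitting unmatched bytes as literals.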
func (e *fastEncL4) Encode(dst *tokens, src []byte) {
	const (
		inputMargin            = 12 - 1
		minNonLiteralBlockSize = 1 + 1 + inputMargin
		hashShortBytes         = 4
	)
	if debugDeflate && e.cur < 0 {
		panic(fmt.Sprint("e.cur < 0: ", e.cur))
	}
	// Protect against e.cur wraparound.
	for e.cur >= bufferReset {
		if len(e.hist) == 0 {
			for i := range e.table[:] {
				e.table[i] = tableEntry{}
			}
			for i := range e.bTable[:] {
				e.bTable[i] = tableEntry{}
			}
			e.cur = maxMatchOffset
			break
		}
		// Shift down everything in the table that isn't already too far away.
		minOff := e.cur + int32(len(e.hist)) - maxMatchOffset
		for i := range e.table[:] {
			v := e.table[i].offset
			if v <= minOff {
				v = 0
			} else {
				v = v - e.cur + maxMatchOffset
			}
			e.table[i].offset = v
		}
		for i := range e.bTable[:] {
			v := e.bTable[i].offset
			if v <= minOff {
				v = 0
			} else {
				v = v - e.cur + maxMatchOffset
			}
			e.bTable[i].offset = v
		}
		e.cur = maxMatchOffset
	}

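	// Add the block to the history buffer; s is the offset of src inside e.hist.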
	s := e.addBlock(src)

	// This check isn't in the Snappy implementation, but there, the caller
	// instead of the callee handles this case.
	if len(src) < minNonLiteralBlockSize {
		// We do not fill the token table.
		// This will be picked up by caller.
		dst.n = uint16(len(src))
		return
	}

	// Override src
	src = e.hist
	nextEmit := s

	// sLimit is when to stop looking for offset/length copies. The inputMargin
	// lets us use a fast path for emitLiteral in the main loop, while we are
	// looking for copies.
	sLimit := int32(len(src) - inputMargin)

	// nextEmit is where in src the next emitLiteral should start from.
	cv := load6432(src, s)
	for {
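		// skipLog controls how quickly we skip ahead when no matches are
		// found; doEvery is the base step between candidate positions.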
		const skipLog = 6
		const doEvery = 1

		nextS := s
		var t int32
		for {
			nextHashS := hashLen(cv, tableBits, hashShortBytes)
			nextHashL := hash7(cv, tableBits)

			s = nextS
			nextS = s + doEvery + (s-nextEmit)>>skipLog
			if nextS > sLimit {
				goto emitRemainder
			}
			// Fetch a short+long candidate
			sCandidate := e.table[nextHashS]
			lCandidate := e.bTable[nextHashL]
			next := load6432(src, nextS)
			entry := tableEntry{offset: s + e.cur}
			e.table[nextHashS] = entry
			e.bTable[nextHashL] = entry

			t = lCandidate.offset - e.cur
			if s-t < maxMatchOffset && uint32(cv) == load3232(src, lCandidate.offset-e.cur) {
				// We got a long match. Use that.
				break
			}

			t = sCandidate.offset - e.cur
			if s-t < maxMatchOffset && uint32(cv) == load3232(src, sCandidate.offset-e.cur) {
				// Found a 4 match...
				lCandidate = e.bTable[hash7(next, tableBits)]

				// If the next long is a candidate, check if we should use that instead...
				lOff := nextS - (lCandidate.offset - e.cur)
				if lOff < maxMatchOffset && load3232(src, lCandidate.offset-e.cur) == uint32(next) {
					l1, l2 := matchLen(src[s+4:], src[t+4:]), matchLen(src[nextS+4:], src[nextS-lOff+4:])
					if l2 > l1 {
						s = nextS
						t = lCandidate.offset - e.cur
					}
				}
				break
			}
			cv = next
		}

		// A 4-byte match has been found. We'll later see if more than 4 bytes
		// match. But, prior to the match, src[nextEmit:s] are unmatched. Emit
		// them as literal bytes.

		// Extend the 4-byte match as long as possible.
		l := e.matchlenLong(s+4, t+4, src) + 4

		// Extend backwards
		for t > 0 && s > nextEmit && src[t-1] == src[s-1] {
			s--
			t--
			l++
		}
		if nextEmit < s {
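			// Emit the unmatched bytes as literals. The disabled branch shows
			// the equivalent emitLiteral call; the inline loop writes the
			// literal tokens and updates the literal histogram directly.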
			if false {
				emitLiteral(dst, src[nextEmit:s])
			} else {
				for _, v := range src[nextEmit:s] {
					dst.tokens[dst.n] = token(v)
					dst.litHist[v]++
					dst.n++
				}
			}
		}
		if debugDeflate {
			if t >= s {
				panic("s-t")
			}
			if (s - t) > maxMatchOffset {
				panic(fmt.Sprintln("mmo", t))
			}
			if l < baseMatchLength {
				panic("bml")
			}
		}

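		// Emit the match: length l at offset s-t.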
		dst.AddMatchLong(l, uint32(s-t-baseMatchOffset))
		s += l
		nextEmit = s
		if nextS >= s {
			s = nextS + 1
		}

		if s >= sLimit {
			// Index first pair after match end.
			if int(s+8) < len(src) {
				cv := load6432(src, s)
				e.table[hashLen(cv, tableBits, hashShortBytes)] = tableEntry{offset: s + e.cur}
				e.bTable[hash7(cv, tableBits)] = tableEntry{offset: s + e.cur}
			}
			goto emitRemainder
		}

		// Store every 3rd hash in-between
		if true {
			i := nextS
			if i < s-1 {
				cv := load6432(src, i)
				t := tableEntry{offset: i + e.cur}
				t2 := tableEntry{offset: t.offset + 1}
				e.bTable[hash7(cv, tableBits)] = t
				e.bTable[hash7(cv>>8, tableBits)] = t2
				e.table[hashLen(cv>>8, tableBits, hashShortBytes)] = t2

				i += 3
				for ; i < s-1; i += 3 {
					cv := load6432(src, i)
					t := tableEntry{offset: i + e.cur}
					t2 := tableEntry{offset: t.offset + 1}
					e.bTable[hash7(cv, tableBits)] = t
					e.bTable[hash7(cv>>8, tableBits)] = t2
					e.table[hashLen(cv>>8, tableBits, hashShortBytes)] = t2
				}
			}
		}

		// We could immediately start working at s now, but to improve
		// compression we first update the hash table at s-1 and at s.
		x := load6432(src, s-1)
		o := e.cur + s - 1
		prevHashS := hashLen(x, tableBits, hashShortBytes)
		prevHashL := hash7(x, tableBits)
		e.table[prevHashS] = tableEntry{offset: o}
		e.bTable[prevHashL] = tableEntry{offset: o}
		cv = x >> 8
	}

emitRemainder:
	if int(nextEmit) < len(src) {
		// If nothing was added, don't encode literals.
		if dst.n == 0 {
			return
		}

		emitLiteral(dst, src[nextEmit:])
	}
}