package brotli

/* Copyright 2013 Google Inc. All Rights Reserved.

   Distributed under MIT license.
   See file LICENSE for detail or copy at https://opensource.org/licenses/MIT
*/

/* Functions to estimate the bit cost of Huffman trees. */
/* shannonEntropy returns the unnormalized Shannon entropy (in bits) of the
   first size entries of population and stores their sum in *total. */
func shannonEntropy(population []uint32, size uint, total *uint) float64 {
	var sum uint = 0
	var retval float64 = 0
	var population_end []uint32 = population[size:]
	var p uint
	/* Walk the first size entries; the -cap comparison advances until
	   population has been consumed down to population_end. */
	for -cap(population) < -cap(population_end) {
		p = uint(population[0])
		population = population[1:]
		sum += p
		retval -= float64(p) * fastLog2(p)
	}

	if sum != 0 {
		retval += float64(sum) * fastLog2(sum)
	}
	*total = sum
	return retval
}
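
/* Illustrative worked example (not part of the original source): for the
   population [4, 4, 8], sum = 16 and
     retval = -(4*log2(4) + 4*log2(4) + 8*log2(8)) + 16*log2(16)
            = -(8 + 8 + 24) + 64 = 24 bits,
   i.e. 16 symbols times the per-symbol entropy of 1.5 bits. */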

/* bitsEntropy is shannonEntropy clamped from below to one bit per symbol. */
func bitsEntropy(population []uint32, size uint) float64 {
	var sum uint
	var retval float64 = shannonEntropy(population, size, &sum)
	if retval < float64(sum) {
		/* At least one bit per literal is needed. */
		retval = float64(sum)
	}

	return retval
}
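
/* Illustrative example (not part of the original source): a population with a
   single non-zero entry, say [0, 16, 0], has Shannon entropy 0
   (-16*log2(16) + 16*log2(16)), but its 16 symbols still cost at least one
   bit each to emit, so bitsEntropy clamps the estimate up to 16. */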

const kOneSymbolHistogramCost float64 = 12
const kTwoSymbolHistogramCost float64 = 20
const kThreeSymbolHistogramCost float64 = 28
const kFourSymbolHistogramCost float64 = 37
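
/* The constants above appear to be fixed estimates, in bits, of the header
   cost of a degenerate Huffman tree with one to four distinct symbols; the
   per-symbol payload cost for the two- to four-symbol cases is added on top
   inside the populationCost* functions below. */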

func populationCostLiteral(histogram *histogramLiteral) float64 {
	var data_size uint = histogramDataSizeLiteral()
	var count int = 0
	var s [5]uint
	var bits float64 = 0.0
	var i uint
	if histogram.total_count_ == 0 {
		return kOneSymbolHistogramCost
	}

	/* Find the first (up to five) symbols with non-zero counts. */
	for i = 0; i < data_size; i++ {
		if histogram.data_[i] > 0 {
			s[count] = i
			count++
			if count > 4 {
				break
			}
		}
	}

	if count == 1 {
		return kOneSymbolHistogramCost
	}

	if count == 2 {
		return kTwoSymbolHistogramCost + float64(histogram.total_count_)
	}

	if count == 3 {
		var histo0 uint32 = histogram.data_[s[0]]
		var histo1 uint32 = histogram.data_[s[1]]
		var histo2 uint32 = histogram.data_[s[2]]
		var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2))
		return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax)
	}

	if count == 4 {
		var histo [4]uint32
		var h23 uint32
		var histomax uint32
		for i = 0; i < 4; i++ {
			histo[i] = histogram.data_[s[i]]
		}

		/* Sort */
		for i = 0; i < 4; i++ {
			var j uint
			for j = i + 1; j < 4; j++ {
				if histo[j] > histo[i] {
					var tmp uint32 = histo[j]
					histo[j] = histo[i]
					histo[i] = tmp
				}
			}
		}

		h23 = histo[2] + histo[3]
		histomax = brotli_max_uint32_t(h23, histo[0])
		return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax)
	}
	{
		var max_depth uint = 1
		var depth_histo = [codeLengthCodes]uint32{0}
		/* In this loop we compute the entropy of the histogram and simultaneously
		   build a simplified histogram of the code length codes where we use the
		   zero repeat code 17, but we don't use the non-zero repeat code 16. */

		var log2total float64 = fastLog2(histogram.total_count_)
		for i = 0; i < data_size; {
			if histogram.data_[i] > 0 {
				var log2p float64 = log2total - fastLog2(uint(histogram.data_[i]))
				/* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) =
				   = log2(total_count) - log2(count(symbol)) */

				var depth uint = uint(log2p + 0.5)
				/* Approximate the bit depth by round(-log2(P(symbol))) */
				bits += float64(histogram.data_[i]) * log2p

				if depth > 15 {
					depth = 15
				}

				if depth > max_depth {
					max_depth = depth
				}

				depth_histo[depth]++
				i++
			} else {
				var reps uint32 = 1
				/* Compute the run length of zeros and add the appropriate number of 0
				   and 17 code length codes to the code length code histogram. */

				var k uint
				for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ {
					reps++
				}

				i += uint(reps)
				if i == data_size {
					/* Don't add any cost for the last zero run, since these are encoded
					   only implicitly. */
					break
				}

				if reps < 3 {
					depth_histo[0] += reps
				} else {
					reps -= 2
					for reps > 0 {
						depth_histo[repeatZeroCodeLength]++

						/* Add the 3 extra bits for the 17 code length code. */
						bits += 3

						reps >>= 3
					}
				}
			}
		}

		/* Add the estimated encoding cost of the code length code histogram. */
		bits += float64(18 + 2*max_depth)

		/* Add the entropy of the code length code histogram. */
		bits += bitsEntropy(depth_histo[:], codeLengthCodes)
	}

	return bits
}
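
/* Illustrative arithmetic for the general case above (not part of the
   original source): a symbol seen 100 times out of total_count_ == 1000 gets
   log2p = log2(1000) - log2(100) ~= 3.32, contributes 100*3.32 ~= 332 bits to
   the estimate, and is tallied in depth_histo at the rounded depth 3. */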

func populationCostCommand(histogram *histogramCommand) float64 {
	var data_size uint = histogramDataSizeCommand()
	var count int = 0
	var s [5]uint
	var bits float64 = 0.0
	var i uint
	if histogram.total_count_ == 0 {
		return kOneSymbolHistogramCost
	}

	/* Find the first (up to five) symbols with non-zero counts. */
	for i = 0; i < data_size; i++ {
		if histogram.data_[i] > 0 {
			s[count] = i
			count++
			if count > 4 {
				break
			}
		}
	}

	if count == 1 {
		return kOneSymbolHistogramCost
	}

	if count == 2 {
		return kTwoSymbolHistogramCost + float64(histogram.total_count_)
	}

	if count == 3 {
		var histo0 uint32 = histogram.data_[s[0]]
		var histo1 uint32 = histogram.data_[s[1]]
		var histo2 uint32 = histogram.data_[s[2]]
		var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2))
		return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax)
	}

	if count == 4 {
		var histo [4]uint32
		var h23 uint32
		var histomax uint32
		for i = 0; i < 4; i++ {
			histo[i] = histogram.data_[s[i]]
		}

		/* Sort */
		for i = 0; i < 4; i++ {
			var j uint
			for j = i + 1; j < 4; j++ {
				if histo[j] > histo[i] {
					var tmp uint32 = histo[j]
					histo[j] = histo[i]
					histo[i] = tmp
				}
			}
		}

		h23 = histo[2] + histo[3]
		histomax = brotli_max_uint32_t(h23, histo[0])
		return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax)
	}
	{
		var max_depth uint = 1
		var depth_histo = [codeLengthCodes]uint32{0}
		/* In this loop we compute the entropy of the histogram and simultaneously
		   build a simplified histogram of the code length codes where we use the
		   zero repeat code 17, but we don't use the non-zero repeat code 16. */

		var log2total float64 = fastLog2(histogram.total_count_)
		for i = 0; i < data_size; {
			if histogram.data_[i] > 0 {
				var log2p float64 = log2total - fastLog2(uint(histogram.data_[i]))
				/* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) =
				   = log2(total_count) - log2(count(symbol)) */

				var depth uint = uint(log2p + 0.5)
				/* Approximate the bit depth by round(-log2(P(symbol))) */
				bits += float64(histogram.data_[i]) * log2p

				if depth > 15 {
					depth = 15
				}

				if depth > max_depth {
					max_depth = depth
				}

				depth_histo[depth]++
				i++
			} else {
				var reps uint32 = 1
				/* Compute the run length of zeros and add the appropriate number of 0
				   and 17 code length codes to the code length code histogram. */

				var k uint
				for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ {
					reps++
				}

				i += uint(reps)
				if i == data_size {
					/* Don't add any cost for the last zero run, since these are encoded
					   only implicitly. */
					break
				}

				if reps < 3 {
					depth_histo[0] += reps
				} else {
					reps -= 2
					for reps > 0 {
						depth_histo[repeatZeroCodeLength]++

						/* Add the 3 extra bits for the 17 code length code. */
						bits += 3

						reps >>= 3
					}
				}
			}
		}

		/* Add the estimated encoding cost of the code length code histogram. */
		bits += float64(18 + 2*max_depth)

		/* Add the entropy of the code length code histogram. */
		bits += bitsEntropy(depth_histo[:], codeLengthCodes)
	}

	return bits
}

func populationCostDistance(histogram *histogramDistance) float64 {
	var data_size uint = histogramDataSizeDistance()
	var count int = 0
	var s [5]uint
	var bits float64 = 0.0
	var i uint
	if histogram.total_count_ == 0 {
		return kOneSymbolHistogramCost
	}

	/* Find the first (up to five) symbols with non-zero counts. */
	for i = 0; i < data_size; i++ {
		if histogram.data_[i] > 0 {
			s[count] = i
			count++
			if count > 4 {
				break
			}
		}
	}

	if count == 1 {
		return kOneSymbolHistogramCost
	}

	if count == 2 {
		return kTwoSymbolHistogramCost + float64(histogram.total_count_)
	}

	if count == 3 {
		var histo0 uint32 = histogram.data_[s[0]]
		var histo1 uint32 = histogram.data_[s[1]]
		var histo2 uint32 = histogram.data_[s[2]]
		var histomax uint32 = brotli_max_uint32_t(histo0, brotli_max_uint32_t(histo1, histo2))
		return kThreeSymbolHistogramCost + 2*(float64(histo0)+float64(histo1)+float64(histo2)) - float64(histomax)
	}

	if count == 4 {
		var histo [4]uint32
		var h23 uint32
		var histomax uint32
		for i = 0; i < 4; i++ {
			histo[i] = histogram.data_[s[i]]
		}

		/* Sort */
		for i = 0; i < 4; i++ {
			var j uint
			for j = i + 1; j < 4; j++ {
				if histo[j] > histo[i] {
					var tmp uint32 = histo[j]
					histo[j] = histo[i]
					histo[i] = tmp
				}
			}
		}

		h23 = histo[2] + histo[3]
		histomax = brotli_max_uint32_t(h23, histo[0])
		return kFourSymbolHistogramCost + 3*float64(h23) + 2*(float64(histo[0])+float64(histo[1])) - float64(histomax)
	}
	{
		var max_depth uint = 1
		var depth_histo = [codeLengthCodes]uint32{0}
		/* In this loop we compute the entropy of the histogram and simultaneously
		   build a simplified histogram of the code length codes where we use the
		   zero repeat code 17, but we don't use the non-zero repeat code 16. */

		var log2total float64 = fastLog2(histogram.total_count_)
		for i = 0; i < data_size; {
			if histogram.data_[i] > 0 {
				var log2p float64 = log2total - fastLog2(uint(histogram.data_[i]))
				/* Compute -log2(P(symbol)) = -log2(count(symbol)/total_count) =
				   = log2(total_count) - log2(count(symbol)) */

				var depth uint = uint(log2p + 0.5)
				/* Approximate the bit depth by round(-log2(P(symbol))) */
				bits += float64(histogram.data_[i]) * log2p

				if depth > 15 {
					depth = 15
				}

				if depth > max_depth {
					max_depth = depth
				}

				depth_histo[depth]++
				i++
			} else {
				var reps uint32 = 1
				/* Compute the run length of zeros and add the appropriate number of 0
				   and 17 code length codes to the code length code histogram. */

				var k uint
				for k = i + 1; k < data_size && histogram.data_[k] == 0; k++ {
					reps++
				}

				i += uint(reps)
				if i == data_size {
					/* Don't add any cost for the last zero run, since these are encoded
					   only implicitly. */
					break
				}

				if reps < 3 {
					depth_histo[0] += reps
				} else {
					reps -= 2
					for reps > 0 {
						depth_histo[repeatZeroCodeLength]++

						/* Add the 3 extra bits for the 17 code length code. */
						bits += 3

						reps >>= 3
					}
				}
			}
		}

		/* Add the estimated encoding cost of the code length code histogram. */
		bits += float64(18 + 2*max_depth)

		/* Add the entropy of the code length code histogram. */
		bits += bitsEntropy(depth_histo[:], codeLengthCodes)
	}

	return bits
}
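
/* Typical use (an assumption about the wider encoder, not stated in this
   file): block splitting and histogram clustering compare populationCost*
   values of candidate histograms to decide whether merging two histograms
   lowers the estimated encoded size. */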