Commit a6975b89 authored by unknown

Merge mysql.com:/home/mydev/mysql-5.0

into mysql.com:/home/mydev/mysql-5.0-bug8321

parents 1960fbcf 12a640e2
......@@ -367,6 +367,7 @@ enum ha_base_keytype {
#define HA_STATE_EXTEND_BLOCK 2048
#define HA_STATE_RNEXT_SAME 4096 /* rnext_same was called */
/* myisampack expects no more than 32 field types. */
enum en_fieldtype {
FIELD_LAST=-1,FIELD_NORMAL,FIELD_SKIP_ENDSPACE,FIELD_SKIP_PRESPACE,
FIELD_SKIP_ZERO,FIELD_BLOB,FIELD_CONSTANT,FIELD_INTERVALL,FIELD_ZERO,
......
......@@ -416,8 +416,19 @@ static uint find_longest_bitstream(uint16 *table, uint16 *end)
}
/* Read record from datafile */
/* Returns length of packed record, -1 if error */
/*
Read record from datafile.
SYNOPSIS
_mi_read_pack_record()
info A pointer to MI_INFO.
filepos File offset of the record.
buf RETURN The buffer to receive the record.
RETURN
0 on success
HA_ERR_WRONG_IN_RECORD or -1 on error
*/
int _mi_read_pack_record(MI_INFO *info, my_off_t filepos, byte *buf)
{
......
......@@ -33,10 +33,10 @@
#include <my_getopt.h>
#include <assert.h>
#if INT_MAX > 32767
#define BITS_SAVED 32
#if SIZEOF_LONG_LONG > 4
#define BITS_SAVED 64
#else
#define BITS_SAVED 16
#define BITS_SAVED 32
#endif
#define IS_OFFSET ((uint) 32768) /* Bit if offset or char in tree */
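The switch to a 64-bit bit bucket (BITS_SAVED 64 where long long is available) is what allows Huffman codes longer than 32 bits to be emitted. Below is a minimal sketch of the bucket mechanics behind write_bits()/flush_bits(); it is illustrative only, the names are invented, and the real code writes into the file buffer instead of printing:

#include <stdio.h>
#include <stdint.h>

#define SKETCH_BITS_SAVED 64          /* plays the role of BITS_SAVED */

static uint64_t sketch_bucket;        /* plays the role of st_file_buffer.bitbucket */
static int sketch_bits_free= SKETCH_BITS_SAVED;

/* 'value' must contain no more than 'bits' significant low-order bits. */
static void sketch_write_bits(uint64_t value, unsigned bits)
{
  if ((int) bits < sketch_bits_free)
  {
    sketch_bits_free-= bits;
    sketch_bucket|= value << sketch_bits_free;       /* pack MSB first */
  }
  else
  {
    bits-= sketch_bits_free;                         /* bits left over after filling */
    sketch_bucket|= value >> bits;                   /* top part completes the bucket */
    printf("flush 0x%016llx\n", (unsigned long long) sketch_bucket);
    sketch_bucket= bits ? value << (SKETCH_BITS_SAVED - bits) : 0;
    sketch_bits_free= SKETCH_BITS_SAVED - bits;
  }
}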
......@@ -49,10 +49,10 @@
struct st_file_buffer {
File file;
char *buffer,*pos,*end;
uchar *buffer,*pos,*end;
my_off_t pos_in_file;
int bits;
uint current_byte;
ulonglong bitbucket;
};
struct st_huff_tree;
......@@ -69,13 +69,17 @@ typedef struct st_huff_counts {
my_off_t end_space[8];
my_off_t pre_space[8];
my_off_t tot_end_space,tot_pre_space,zero_fields,empty_fields,bytes_packed;
TREE int_tree;
byte *tree_buff;
byte *tree_pos;
TREE int_tree; /* Tree for detecting distinct column values. */
byte *tree_buff; /* Column values, 'field_length' each. */
byte *tree_pos; /* Points to end of column values in 'tree_buff'. */
} HUFF_COUNTS;
typedef struct st_huff_element HUFF_ELEMENT;
/*
WARNING: It is crucial for the optimizations in calc_packed_length()
that 'count' is the first element of 'HUFF_ELEMENT'.
*/
struct st_huff_element {
my_off_t count;
union un_element {
......@@ -98,7 +102,7 @@ typedef struct st_huff_tree {
my_off_t bytes_packed;
uint tree_pack_length;
uint min_chr,max_chr,char_bits,offset_bits,max_offset,height;
ulong *code;
ulonglong *code;
uchar *code_len;
} HUFF_TREE;
......@@ -146,7 +150,7 @@ static uint join_same_trees(HUFF_COUNTS *huff_counts,uint trees);
static int make_huff_decode_table(HUFF_TREE *huff_tree,uint trees);
static void make_traverse_code_tree(HUFF_TREE *huff_tree,
HUFF_ELEMENT *element,uint size,
ulong code);
ulonglong code);
static int write_header(PACK_MRG_INFO *isam_file, uint header_length,uint trees,
my_off_t tot_elements,my_off_t filelength);
static void write_field_info(HUFF_COUNTS *counts, uint fields,uint trees);
......@@ -161,7 +165,7 @@ static char *make_old_name(char *new_name,char *old_name);
static void init_file_buffer(File file,pbool read_buffer);
static int flush_buffer(ulong neaded_length);
static void end_file_buffer(void);
static void write_bits(ulong value,uint bits);
static void write_bits(ulonglong value, uint bits);
static void flush_bits(void);
static int save_state(MI_INFO *isam_file,PACK_MRG_INFO *mrg,my_off_t new_length,
ha_checksum crc);
......@@ -170,13 +174,23 @@ static int save_state_mrg(File file,PACK_MRG_INFO *isam_file,my_off_t new_length
static int mrg_close(PACK_MRG_INFO *mrg);
static int mrg_rrnd(PACK_MRG_INFO *info,byte *buf);
static void mrg_reset(PACK_MRG_INFO *mrg);
#if !defined(DBUG_OFF)
static void fakebigcodes(HUFF_COUNTS *huff_counts, HUFF_COUNTS *end_count);
static int fakecmp(my_off_t **count1, my_off_t **count2);
#endif
static int error_on_write=0,test_only=0,verbose=0,silent=0,
write_loop=0,force_pack=0, isamchk_neaded=0;
static int tmpfile_createflag=O_RDWR | O_TRUNC | O_EXCL;
static my_bool backup, opt_wait;
static uint tree_buff_length=8196-MALLOC_OVERHEAD;
/*
tree_buff_length is somewhat arbitrary. The bigger it is the better
the chance to win in terms of compression factor. On the other hand,
this table becomes part of the compressed file header. And its length
is coded with 16 bits in the header. Hence the limit is 2**16 - 1.
*/
static uint tree_buff_length= 65536 - MALLOC_OVERHEAD;
static char tmp_dir[FN_REFLEN]={0},*join_table;
static my_off_t intervall_length;
static ha_checksum glob_crc;
......@@ -225,7 +239,8 @@ int main(int argc, char **argv)
}
if (ok && isamchk_neaded && !silent)
puts("Remember to run myisamchk -rq on compressed tables");
VOID(fflush(stdout)); VOID(fflush(stderr));
VOID(fflush(stdout));
VOID(fflush(stderr));
free_defaults(default_argv);
my_end(verbose ? MY_CHECK_ERROR | MY_GIVE_INFO : MY_CHECK_ERROR);
exit(error ? 2 : 0);
......@@ -260,7 +275,7 @@ static struct my_option my_long_options[] =
0, 0, 0, GET_STR, REQUIRED_ARG, 0, 0, 0, 0, 0, 0},
{"test", 't', "Don't pack table, only test packing it.",
0, 0, 0, GET_NO_ARG, NO_ARG, 0, 0, 0, 0, 0, 0},
{"verbose", 'v', "Write info about progress and packing result.",
{"verbose", 'v', "Write info about progress and packing result. Use many -v for more verbosity!",
0, 0, 0, GET_NO_ARG, NO_ARG, 0, 0, 0, 0, 0, 0},
{"version", 'V', "Output version information and exit.",
0, 0, 0, GET_NO_ARG, NO_ARG, 0, 0, 0, 0, 0, 0},
......@@ -273,7 +288,8 @@ static struct my_option my_long_options[] =
static void print_version(void)
{
printf("%s Ver 1.22 for %s on %s\n", my_progname, SYSTEM_TYPE, MACHINE_TYPE);
VOID(printf("%s Ver 1.22 for %s on %s\n",
my_progname, SYSTEM_TYPE, MACHINE_TYPE));
NETWARE_SET_SCREEN_MODE(1);
}
......@@ -290,7 +306,7 @@ static void usage(void)
puts("afterwards to update the keys.");
puts("You should give the .MYI file as the filename argument.");
printf("\nUsage: %s [OPTIONS] filename...\n", my_progname);
VOID(printf("\nUsage: %s [OPTIONS] filename...\n", my_progname));
my_print_help(my_long_options);
print_defaults("my", load_default_groups);
my_print_variables(my_long_options);
......@@ -314,7 +330,10 @@ get_one_option(int optid, const struct my_option *opt __attribute__((unused)),
silent= 1;
break;
case 't':
test_only= verbose= 1;
test_only= 1;
/* Avoid resetting 'verbose' if it was already set > 1. */
if (! verbose)
verbose= 1;
break;
case 'T':
length= (uint) (strmov(tmp_dir, argument) - tmp_dir);
......@@ -325,7 +344,7 @@ get_one_option(int optid, const struct my_option *opt __attribute__((unused)),
}
break;
case 'v':
verbose= 1;
verbose++; /* Allow for selecting the level of verbosity. */
silent= 0;
break;
case '#':
......@@ -380,7 +399,7 @@ static MI_INFO *open_isam_file(char *name,int mode)
(opt_wait ? HA_OPEN_WAIT_IF_LOCKED :
HA_OPEN_ABORT_IF_LOCKED))))
{
VOID(fprintf(stderr,"%s gave error %d on open\n",name,my_errno));
VOID(fprintf(stderr, "%s gave error %d on open\n", name, my_errno));
DBUG_RETURN(0);
}
share=isam_file->s;
......@@ -388,7 +407,7 @@ static MI_INFO *open_isam_file(char *name,int mode)
{
if (!force_pack)
{
VOID(fprintf(stderr,"%s is already compressed\n",name));
VOID(fprintf(stderr, "%s is already compressed\n", name));
VOID(mi_close(isam_file));
DBUG_RETURN(0);
}
......@@ -400,7 +419,7 @@ static MI_INFO *open_isam_file(char *name,int mode)
(share->state.state.records <= 1 ||
share->state.state.data_file_length < 1024))
{
VOID(fprintf(stderr,"%s is too small to compress\n",name));
VOID(fprintf(stderr, "%s is too small to compress\n", name));
VOID(mi_close(isam_file));
DBUG_RETURN(0);
}
......@@ -446,8 +465,8 @@ static bool open_isam_files(PACK_MRG_INFO *mrg,char **names,uint count)
return 0;
diff_file:
fprintf(stderr,"%s: Tables '%s' and '%s' are not identical\n",
my_progname,names[j],names[j+1]);
VOID(fprintf(stderr, "%s: Tables '%s' and '%s' are not identical\n",
my_progname, names[j], names[j+1]));
error:
while (i--)
mi_close(mrg->file[i]);
......@@ -518,16 +537,25 @@ static int compress(PACK_MRG_INFO *mrg,char *result_table)
mrg->records=0;
for (i=0 ; i < mrg->count ; i++)
mrg->records+=mrg->file[i]->s->state.state.records;
DBUG_PRINT("info", ("Compressing %s: (%lu records)",
result_table ? new_name : org_name,
(ulong) mrg->records));
if (write_loop || verbose)
{
printf("Compressing %s: (%lu records)\n",
result_table ? new_name : org_name,(ulong) mrg->records);
VOID(printf("Compressing %s: (%lu records)\n",
result_table ? new_name : org_name, (ulong) mrg->records));
}
trees=fields=share->base.fields;
huff_counts=init_huff_count(isam_file,mrg->records);
QUICK_SAFEMALLOC;
/*
Read the whole data file(s) for statistics.
*/
DBUG_PRINT("info", ("- Calculating statistics"));
if (write_loop || verbose)
printf("- Calculating statistics\n");
VOID(printf("- Calculating statistics\n"));
if (get_statistic(mrg,huff_counts))
goto err;
NORMAL_SAFEMALLOC;
......@@ -536,29 +564,74 @@ static int compress(PACK_MRG_INFO *mrg,char *result_table)
old_length+= (mrg->file[i]->s->state.state.data_file_length -
mrg->file[i]->s->state.state.empty);
/*
Create a global priority queue in preparation for making
temporary Huffman trees.
*/
if (init_queue(&queue,256,0,0,compare_huff_elements,0))
goto err;
/*
Check each column if we should use pre-space-compress, end-space-
compress, empty-field-compress or zero-field-compress.
*/
check_counts(huff_counts,fields,mrg->records);
/*
Build a Huffman tree for each column.
*/
huff_trees=make_huff_trees(huff_counts,trees);
/*
If the packed length of combined columns is less than the sum of
the non-combined columns, then create common Huffman trees for them.
We do this only for byte-compressed columns, not for distinct-value
compressed columns.
*/
if ((int) (used_trees=join_same_trees(huff_counts,trees)) < 0)
goto err;
/*
Assign codes to all byte or column values.
*/
if (make_huff_decode_table(huff_trees,fields))
goto err;
/* Prepare a file buffer. */
init_file_buffer(new_file,0);
/*
Reserve space in the target file for the fixed compressed file header.
*/
file_buffer.pos_in_file=HEAD_LENGTH;
if (! test_only)
VOID(my_seek(new_file,file_buffer.pos_in_file,MY_SEEK_SET,MYF(0)));
/*
Write field infos: field type, pack type, length bits, tree number.
*/
write_field_info(huff_counts,fields,used_trees);
/*
Write decode trees.
*/
if (!(tot_elements=write_huff_tree(huff_trees,trees)))
goto err;
/*
Calculate the total length of the compression info header.
This includes the fixed compressed file header, the column compression
type descriptions, and the decode trees.
*/
header_length=(uint) file_buffer.pos_in_file+
(uint) (file_buffer.pos-file_buffer.buffer);
/* Compress file */
/*
Compress the source file into the target file.
*/
DBUG_PRINT("info", ("- Compressing file"));
if (write_loop || verbose)
printf("- Compressing file\n");
VOID(printf("- Compressing file\n"));
error=compress_isam_file(mrg,huff_counts);
new_length=file_buffer.pos_in_file;
if (!error && !test_only)
......@@ -568,16 +641,28 @@ static int compress(PACK_MRG_INFO *mrg,char *result_table)
error=my_write(file_buffer.file,buff,sizeof(buff),
MYF(MY_WME | MY_NABP | MY_WAIT_IF_FULL)) != 0;
}
/*
Write the fixed compressed file header.
*/
if (!error)
error=write_header(mrg,header_length,used_trees,tot_elements,
new_length);
/* Flush the file buffer. */
end_file_buffer();
/* Display statistics. */
DBUG_PRINT("info", ("Min record length: %6d Max length: %6d "
"Mean total length: %6ld\n",
mrg->min_pack_length, mrg->max_pack_length,
(ulong) (mrg->records ? (new_length/mrg->records) : 0)));
if (verbose && mrg->records)
printf("Min record length: %6d Max length: %6d Mean total length: %6ld\n",
mrg->min_pack_length,mrg->max_pack_length,
(ulong) (new_length/mrg->records));
VOID(printf("Min record length: %6d Max length: %6d "
"Mean total length: %6ld\n", mrg->min_pack_length,
mrg->max_pack_length, (ulong) (new_length/mrg->records)));
/* Close source and target file. */
if (!test_only)
{
error|=my_close(new_file,MYF(MY_WME));
......@@ -588,6 +673,7 @@ static int compress(PACK_MRG_INFO *mrg,char *result_table)
}
}
/* Cleanup. */
free_counts_and_tree_and_queue(huff_trees,trees,huff_counts,fields);
if (! test_only && ! error)
{
......@@ -629,15 +715,16 @@ static int compress(PACK_MRG_INFO *mrg,char *result_table)
error|=my_close(join_isam_file,MYF(MY_WME));
if (error)
{
VOID(fprintf(stderr,"Aborting: %s is not compressed\n",org_name));
VOID(fprintf(stderr, "Aborting: %s is not compressed\n", org_name));
VOID(my_delete(new_name,MYF(MY_WME)));
DBUG_RETURN(-1);
}
if (write_loop || verbose)
{
if (old_length)
printf("%.4g%% \n", (((longlong) (old_length -new_length))*100.0/
(longlong) old_length));
VOID(printf("%.4g%% \n",
(((longlong) (old_length - new_length)) * 100.0 /
(longlong) old_length)));
else
puts("Empty file saved in compressed format");
}
......@@ -650,7 +737,7 @@ static int compress(PACK_MRG_INFO *mrg,char *result_table)
if (join_isam_file >= 0)
VOID(my_close(join_isam_file,MYF(0)));
mrg_close(mrg);
VOID(fprintf(stderr,"Aborted: %s is not compressed\n",org_name));
VOID(fprintf(stderr, "Aborted: %s is not compressed\n", org_name));
DBUG_RETURN(-1);
}
......@@ -677,6 +764,12 @@ static HUFF_COUNTS *init_huff_count(MI_INFO *info,my_off_t records)
(type == FIELD_NORMAL ||
type == FIELD_SKIP_ZERO))
count[i].max_zero_fill= count[i].field_length;
/*
For every column initialize a tree, which is used to detect distinct
column values. 'int_tree' works together with 'tree_buff' and
'tree_pos'. Its keys are implemented by pointers into 'tree_buff'.
This is accomplished by '-1' as the element size.
*/
init_tree(&count[i].int_tree,0,0,-1,(qsort_cmp2) compare_tree,0, NULL,
NULL);
if (records && type != FIELD_BLOB && type != FIELD_VARCHAR)
......@@ -762,10 +855,13 @@ static int get_statistic(PACK_MRG_INFO *mrg,HUFF_COUNTS *huff_counts)
ulong tot_blob_length=0;
if (! error)
{
/* glob_crc is a checksum over all bytes of all records. */
if (static_row_size)
glob_crc+=mi_static_checksum(mrg->file[0],record);
else
glob_crc+=mi_checksum(mrg->file[0],record);
/* Count the incidence of values separately for every column. */
for (pos=record,count=huff_counts ;
count < end_count ;
count++,
......@@ -773,15 +869,48 @@ static int get_statistic(PACK_MRG_INFO *mrg,HUFF_COUNTS *huff_counts)
{
next_pos=end_pos=(start_pos=pos)+count->field_length;
/* Put value in tree if there is room for it */
/*
Put the whole column value in a tree if there is room for it.
'int_tree' is used to quickly check for duplicate values.
'tree_buff' collects as many distinct column values as
possible. If the field length is > 1, it is tree_buff_length,
else 2 bytes. Each value is 'field_length' bytes big. If there
are more distinct column values than fit into the buffer, we
give up with this tree. BLOBs and VARCHARs do not have a
tree_buff as it can only be used with fixed length columns.
For the special case of field length == 1, we handle only the
case that there is only one distinct value in the table(s).
Otherwise, we can have a maximum of 256 distinct values. This
is then handled by the normal Huffman tree build.
Another limit for collecting distinct column values is the
number of values itself. Since we would need to build a
Huffman tree for the values, we are limited by the 'IS_OFFSET'
constant. This constant expresses a bit which is used to
determine if a tree element holds a final value or an offset
to a child element. Hence, all values and offsets need to be
smaller than 'IS_OFFSET'. A tree element is implemented with
two integer values, one for the left branch and one for the
right branch. For the extreme case that the first element
points to the last element, the number of integers in the tree
must be less than or equal to IS_OFFSET. So the number of elements
must be less than or equal to IS_OFFSET / 2.
WARNING: At first, we insert a pointer into the record buffer
as the key for the tree. If we get a new distinct value, which is
really inserted into the tree instead of being counted only, we
copy the column value from the record buffer to 'tree_buff' and
adjust the key pointer of the tree accordingly.
*/
if (count->tree_buff)
{
global_count=count;
if (!(element=tree_insert(&count->int_tree,pos, 0,
count->int_tree.custom_arg)) ||
(element->count == 1 &&
count->tree_buff + tree_buff_length <
count->tree_pos + count->field_length) ||
(count->tree_buff + tree_buff_length <
count->tree_pos + count->field_length)) ||
(count->int_tree.elements_in_tree > IS_OFFSET / 2) ||
(count->field_length == 1 &&
count->int_tree.elements_in_tree > 1))
{
......@@ -791,10 +920,17 @@ static int get_statistic(PACK_MRG_INFO *mrg,HUFF_COUNTS *huff_counts)
}
else
{
/*
If tree_insert() succeeds, it either creates a new element
or increments the counter of an existing element.
*/
if (element->count == 1)
{ /* New element */
{
/* Copy the new column value into 'tree_buff'. */
memcpy(count->tree_pos,pos,(size_t) count->field_length);
/* Adjust the key pointer in the tree. */
tree_set_pointer(element,count->tree_pos);
/* Point behind the last column value so far. */
count->tree_pos+=count->field_length;
}
}
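A much simplified sketch of the collection scheme described above: distinct fixed-length values end up back to back in a flat buffer, and we give up once the buffer would overflow. A linear scan stands in for the mysys TREE, and all names are illustrative:

#include <string.h>

/* Returns 1 while the value fits the distinct-value scheme, 0 once we
   must give up (too many distinct values), like the branch above. */
static int sketch_collect_distinct(const char *value, size_t field_length,
                                   char *tree_buff, size_t tree_buff_length,
                                   char **tree_pos)
{
  char *p;
  for (p= tree_buff; p < *tree_pos; p+= field_length)
    if (!memcmp(p, value, field_length))
      return 1;                               /* seen before: nothing to store */
  if (*tree_pos + field_length > tree_buff + tree_buff_length)
    return 0;                                 /* buffer full: drop the value tree */
  memcpy(*tree_pos, value, field_length);     /* new distinct value */
  *tree_pos+= field_length;                   /* like count->tree_pos above */
  return 1;
}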
......@@ -804,15 +940,21 @@ static int get_statistic(PACK_MRG_INFO *mrg,HUFF_COUNTS *huff_counts)
if (count->field_type == FIELD_NORMAL ||
count->field_type == FIELD_SKIP_ENDSPACE)
{
/* Ignore trailing space. */
for ( ; end_pos > pos ; end_pos--)
if (end_pos[-1] != ' ')
break;
/* Empty fields are just counted. Go to the next record. */
if (end_pos == pos)
{
count->empty_fields++;
count->max_zero_fill=0;
continue;
}
/*
Count the total of all trailing spaces and the number of
short trailing spaces. Remember the longest trailing space.
*/
length= (uint) (next_pos-end_pos);
count->tot_end_space+=length;
if (length < 8)
......@@ -820,18 +962,25 @@ static int get_statistic(PACK_MRG_INFO *mrg,HUFF_COUNTS *huff_counts)
if (count->max_end_space < length)
count->max_end_space = length;
}
if (count->field_type == FIELD_NORMAL ||
count->field_type == FIELD_SKIP_PRESPACE)
{
/* Ignore leading space. */
for (pos=start_pos; pos < end_pos ; pos++)
if (pos[0] != ' ')
break;
/* Empty fields are just counted. Go to the next record. */
if (end_pos == pos)
{
count->empty_fields++;
count->max_zero_fill=0;
continue;
}
/*
Count the total of all leading spaces and the number of
short leading spaces. Remember the longest leading space.
*/
length= (uint) (pos-start_pos);
count->tot_pre_space+=length;
if (length < 8)
......@@ -839,6 +988,8 @@ static int get_statistic(PACK_MRG_INFO *mrg,HUFF_COUNTS *huff_counts)
if (count->max_pre_space < length)
count->max_pre_space = length;
}
/* Calculate pos, end_pos, and max_length for variable length fields. */
if (count->field_type == FIELD_BLOB)
{
uint field_length=count->field_length -mi_portable_sizeof_char_ptr;
......@@ -857,45 +1008,121 @@ static int get_statistic(PACK_MRG_INFO *mrg,HUFF_COUNTS *huff_counts)
end_pos= pos+length;
set_if_bigger(count->max_length,length);
}
/* Evaluate 'max_zero_fill' for short fields. */
if (count->field_length <= 8 &&
(count->field_type == FIELD_NORMAL ||
count->field_type == FIELD_SKIP_ZERO))
{
uint i;
/* Zero fields are just counted. Go to the next record. */
if (!memcmp((byte*) start_pos,zero_string,count->field_length))
{
count->zero_fields++;
continue;
}
/*
max_zero_fill starts with field_length. It is decreased every
time a shorter "zero trailer" is found. It is set to zero when
an empty field is found (see above). This suggests that the
variable should be called 'min_zero_fill'.
*/
for (i =0 ; i < count->max_zero_fill && ! end_pos[-1 - (int) i] ;
i++) ;
if (i < count->max_zero_fill)
count->max_zero_fill=i;
}
/* Ignore zero fields and check fields. */
if (count->field_type == FIELD_ZERO ||
count->field_type == FIELD_CHECK)
continue;
/*
Count the incidence of every byte value in the
significant field value.
*/
for ( ; pos < end_pos ; pos++)
count->counts[(uchar) *pos]++;
/* Step to next field. */
}
if (tot_blob_length > max_blob_length)
max_blob_length=tot_blob_length;
record_count++;
if (write_loop && record_count % WRITE_COUNT == 0)
{
printf("%lu\r",(ulong) record_count); VOID(fflush(stdout));
VOID(printf("%lu\r", (ulong) record_count));
VOID(fflush(stdout));
}
}
else if (error != HA_ERR_RECORD_DELETED)
{
fprintf(stderr,"Got error %d while reading rows",error);
VOID(fprintf(stderr, "Got error %d while reading rows", error));
break;
}
/* Step to next record. */
}
if (write_loop)
{
printf(" \r"); VOID(fflush(stdout));
VOID(printf(" \r"));
VOID(fflush(stdout));
}
/*
If --debug=d,fakebigcodes is set, fake the counts to get big Huffman
codes.
*/
DBUG_EXECUTE_IF("fakebigcodes", fakebigcodes(huff_counts, end_count););
DBUG_PRINT("info", ("Found the following number of incidents "
"of the byte codes:"));
if (verbose >= 2)
VOID(printf("Found the following number of incidents "
"of the byte codes:\n"));
for (count= huff_counts ; count < end_count; count++)
{
uint idx;
my_off_t total_count;
char llbuf[32];
DBUG_PRINT("info", ("column: %3u", count - huff_counts + 1));
if (verbose >= 2)
VOID(printf("column: %3u\n", count - huff_counts + 1));
if (count->tree_buff)
{
DBUG_PRINT("info", ("number of distinct values: %u",
(count->tree_pos - count->tree_buff) /
count->field_length));
if (verbose >= 2)
VOID(printf("number of distinct values: %u\n",
(count->tree_pos - count->tree_buff) /
count->field_length));
}
total_count= 0;
for (idx= 0; idx < 256; idx++)
{
if (count->counts[idx])
{
total_count+= count->counts[idx];
DBUG_PRINT("info", ("counts[0x%02x]: %12s", idx,
llstr((longlong) count->counts[idx], llbuf)));
if (verbose >= 2)
VOID(printf("counts[0x%02x]: %12s\n", idx,
llstr((longlong) count->counts[idx], llbuf)));
}
}
DBUG_PRINT("info", ("total: %12s", llstr((longlong) total_count,
llbuf)));
if ((verbose >= 2) && total_count)
{
VOID(printf("total: %12s\n",
llstr((longlong) total_count, llbuf)));
}
}
mrg->records=record_count;
mrg->max_blob_length=max_blob_length;
my_afree((gptr) record);
......@@ -944,9 +1171,14 @@ static void check_counts(HUFF_COUNTS *huff_counts, uint trees,
huff_counts->field_type=FIELD_NORMAL;
huff_counts->pack_type=0;
/* Check for zero-filled records (in this column), or zero records. */
if (huff_counts->zero_fields || ! records)
{
my_off_t old_space_count;
/*
If there are only zero filled records (in this column),
or no records at all, we are done.
*/
if (huff_counts->zero_fields == records)
{
huff_counts->field_type= FIELD_ZERO;
......@@ -954,14 +1186,22 @@ static void check_counts(HUFF_COUNTS *huff_counts, uint trees,
huff_counts->counts[0]=0;
goto found_pack;
}
/* Remember the number of significant spaces. */
old_space_count=huff_counts->counts[' '];
huff_counts->counts[' ']+=huff_counts->tot_end_space+
huff_counts->tot_pre_space +
huff_counts->empty_fields * huff_counts->field_length;
/* Add all leading and trailing spaces. */
huff_counts->counts[' ']+= (huff_counts->tot_end_space +
huff_counts->tot_pre_space +
huff_counts->empty_fields *
huff_counts->field_length);
/* Check what the compressed length of this would be. */
old_length=calc_packed_length(huff_counts,0)+records/8;
/* Get the number of zero bytes. */
length=huff_counts->zero_fields*huff_counts->field_length;
/* Add it to the counts. */
huff_counts->counts[0]+=length;
/* Check what the compressed length of this would be. */
new_length=calc_packed_length(huff_counts,0);
/* If the compression without the zeroes would be shorter, we are done. */
if (old_length < new_length && huff_counts->field_length > 1)
{
huff_counts->field_type=FIELD_SKIP_ZERO;
......@@ -969,9 +1209,16 @@ static void check_counts(HUFF_COUNTS *huff_counts, uint trees,
huff_counts->bytes_packed=old_length- records/8;
goto found_pack;
}
/* Remove the insignificant spaces, but keep the zeroes. */
huff_counts->counts[' ']=old_space_count;
}
/* Check what the compressed length of this column would be. */
huff_counts->bytes_packed=calc_packed_length(huff_counts,0);
/*
If there are enough empty records (in this column),
treating them specially may pay off.
*/
if (huff_counts->empty_fields)
{
if (huff_counts->field_length > 2 &&
......@@ -1003,6 +1250,11 @@ static void check_counts(HUFF_COUNTS *huff_counts, uint trees,
}
}
}
/*
If there are enough trailing spaces (in this column),
treating them specially may pay off.
*/
if (huff_counts->tot_end_space)
{
huff_counts->counts[' ']+=huff_counts->tot_pre_space;
......@@ -1012,6 +1264,11 @@ static void check_counts(HUFF_COUNTS *huff_counts, uint trees,
goto found_pack;
huff_counts->counts[' ']-=huff_counts->tot_pre_space;
}
/*
If there are enough leading spaces (in this column),
treating them specially may pay off.
*/
if (huff_counts->tot_pre_space)
{
if (test_space_compress(huff_counts,records,huff_counts->max_pre_space,
......@@ -1041,6 +1298,8 @@ static void check_counts(HUFF_COUNTS *huff_counts, uint trees,
{
HUFF_TREE tree;
DBUG_EXECUTE_IF("forceintervall",
huff_counts->bytes_packed= ~ (my_off_t) 0;);
tree.element_buffer=0;
if (!make_huff_tree(&tree,huff_counts) &&
tree.bytes_packed+tree.tree_pack_length < huff_counts->bytes_packed)
......@@ -1066,14 +1325,27 @@ static void check_counts(HUFF_COUNTS *huff_counts, uint trees,
fill_zero_fields++;
field_count[huff_counts->field_type]++;
}
DBUG_PRINT("info", ("normal: %3d empty-space: %3d "
"empty-zero: %3d empty-fill: %3d",
field_count[FIELD_NORMAL],space_fields,
field_count[FIELD_SKIP_ZERO],fill_zero_fields));
DBUG_PRINT("info", ("pre-space: %3d end-space: %3d "
"intervall-fields: %3d zero: %3d",
field_count[FIELD_SKIP_PRESPACE],
field_count[FIELD_SKIP_ENDSPACE],
field_count[FIELD_INTERVALL],
field_count[FIELD_ZERO]));
if (verbose)
printf("\nnormal: %3d empty-space: %3d empty-zero: %3d empty-fill: %3d\npre-space: %3d end-space: %3d intervall-fields: %3d zero: %3d\n",
field_count[FIELD_NORMAL],space_fields,
field_count[FIELD_SKIP_ZERO],fill_zero_fields,
field_count[FIELD_SKIP_PRESPACE],
field_count[FIELD_SKIP_ENDSPACE],
field_count[FIELD_INTERVALL],
field_count[FIELD_ZERO]);
VOID(printf("\nnormal: %3d empty-space: %3d "
"empty-zero: %3d empty-fill: %3d\n"
"pre-space: %3d end-space: %3d "
"intervall-fields: %3d zero: %3d\n",
field_count[FIELD_NORMAL],space_fields,
field_count[FIELD_SKIP_ZERO],fill_zero_fields,
field_count[FIELD_SKIP_PRESPACE],
field_count[FIELD_SKIP_ENDSPACE],
field_count[FIELD_INTERVALL],
field_count[FIELD_ZERO]));
DBUG_VOID_RETURN;
}
......@@ -1170,8 +1442,24 @@ static HUFF_TREE* make_huff_trees(HUFF_COUNTS *huff_counts, uint trees)
DBUG_RETURN(huff_tree);
}
/* Update huff_tree according to huff_counts->counts or
huff_counts->tree_buff */
/*
Build a Huffman tree.
SYNOPSIS
make_huff_tree()
huff_tree The Huffman tree.
huff_counts The counts.
DESCRIPTION
Build a Huffman tree according to huff_counts->counts or
huff_counts->tree_buff. tree_buff, if non-NULL, contains up to
tree_buff_length bytes of distinct column values. In that case, whole
column values can be Huffman encoded instead of single bytes.
RETURN
0 OK
!= 0 Error
*/
static int make_huff_tree(HUFF_TREE *huff_tree, HUFF_COUNTS *huff_counts)
{
......@@ -1182,12 +1470,14 @@ static int make_huff_tree(HUFF_TREE *huff_tree, HUFF_COUNTS *huff_counts)
first=last=0;
if (huff_counts->tree_buff)
{
/* Calculate the number of distinct values in tree_buff. */
found= (uint) (huff_counts->tree_pos - huff_counts->tree_buff) /
huff_counts->field_length;
first=0; last=found-1;
}
else
{
/* Count the number of byte codes found in the column. */
for (i=found=0 ; i < 256 ; i++)
{
if (huff_counts->counts[i])
......@@ -1201,6 +1491,7 @@ static int make_huff_tree(HUFF_TREE *huff_tree, HUFF_COUNTS *huff_counts)
found=2;
}
/* When using 'tree_buff' we can have more than 256 values. */
if (queue.max_elements < found)
{
delete_queue(&queue);
......@@ -1208,6 +1499,7 @@ static int make_huff_tree(HUFF_TREE *huff_tree, HUFF_COUNTS *huff_counts)
return -1;
}
/* Allocate or reallocate an element buffer for the Huffman tree. */
if (!huff_tree->element_buffer)
{
if (!(huff_tree->element_buffer=
......@@ -1235,15 +1527,25 @@ static int make_huff_tree(HUFF_TREE *huff_tree, HUFF_COUNTS *huff_counts)
if (huff_counts->tree_buff)
{
huff_tree->elements=0;
tree_walk(&huff_counts->int_tree,
(int (*)(void*, element_count,void*)) save_counts_in_queue,
(gptr) huff_tree, left_root_right);
huff_tree->tree_pack_length=(1+15+16+5+5+
(huff_tree->char_bits+1)*found+
(huff_tree->offset_bits+1)*
(found-2)+7)/8 +
(uint) (huff_tree->counts->tree_pos-
huff_tree->counts->tree_buff);
/*
Put a HUFF_ELEMENT into the queue for every distinct column value.
tree_walk() calls save_counts_in_queue() for every element in
'int_tree'. This takes elements from the target tree's element
buffer and places references to them into the buffer of the
priority queue. We insert in column value order, but the order is
in fact irrelevant here. We will establish the correct order
later.
*/
tree_walk(&huff_counts->int_tree,
(int (*)(void*, element_count,void*)) save_counts_in_queue,
(gptr) huff_tree, left_root_right);
}
else
{
......@@ -1252,7 +1554,15 @@ static int make_huff_tree(HUFF_TREE *huff_tree, HUFF_COUNTS *huff_counts)
(huff_tree->char_bits+1)*found+
(huff_tree->offset_bits+1)*
(found-2)+7)/8;
/*
Put a HUFF_ELEMENT into the queue for every byte code found in the column.
The elements are taken from the target tree's element buffer.
Instead of using queue_insert(), we just place references to the
elements into the buffer of the priority queue. We insert in byte
value order, but the order is in fact irrelevant here. We will
establish the correct order later.
*/
for (i=first, found=0 ; i <= last ; i++)
{
if (huff_counts->counts[i])
......@@ -1264,8 +1574,13 @@ static int make_huff_tree(HUFF_TREE *huff_tree, HUFF_COUNTS *huff_counts)
queue.root[found]=(byte*) new_huff_el;
}
}
/*
If there is only a single byte value in this field in all records,
add a second element with zero incidence. This is required to enter
the loop that builds the Huffman tree.
*/
while (found < 2)
{ /* Our huff_trees request at least 2 elements */
{
new_huff_el=huff_tree->element_buffer+(found++);
new_huff_el->count=0;
new_huff_el->a.leaf.null=0;
......@@ -1276,21 +1591,53 @@ static int make_huff_tree(HUFF_TREE *huff_tree, HUFF_COUNTS *huff_counts)
queue.root[found]=(byte*) new_huff_el;
}
}
/* Make a queue from the queue buffer. */
queue.elements=found;
/*
Make a priority queue from the queue. Construct its index so that we
have a partially ordered tree.
*/
for (i=found/2 ; i > 0 ; i--)
_downheap(&queue,i);
/* The Huffman algorithm. */
bytes_packed=0; bits_packed=0;
for (i=1 ; i < found ; i++)
{
/*
Pop the top element from the queue (the one with the least incidence).
Popping from a priority queue includes a re-ordering of the queue,
to get the next least incidence element to the top.
*/
a=(HUFF_ELEMENT*) queue_remove(&queue,0);
/*
Copy the next least incidence element. The queue implementation
reserves root[0] for temporary purposes. root[1] is the top.
*/
b=(HUFF_ELEMENT*) queue.root[1];
/* Get a new element from the element buffer. */
new_huff_el=huff_tree->element_buffer+found+i;
/* The new element gets the sum of the two least incidence elements. */
new_huff_el->count=a->count+b->count;
/*
The Huffman algorithm assigns another bit to the code for a byte
every time that byte's incidence is combined (directly or indirectly)
into a new element as one of the two least incidence elements.
This means that one more bit per incidence of that byte is required
in the resulting file. So we add the new combined incidence as the
number of bits by which the result grows.
*/
bits_packed+=(uint) (new_huff_el->count & 7);
bytes_packed+=new_huff_el->count/8;
new_huff_el->a.nod.left=a; /* lesser in left */
/* The new element points to its children, lesser in left. */
new_huff_el->a.nod.left=a;
new_huff_el->a.nod.right=b;
/*
Replace the copied top element by the new element and re-order the
queue.
*/
queue.root[1]=(byte*) new_huff_el;
queue_replaced(&queue);
}
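The loop above is the classic Huffman construction: repeatedly merge the two elements with the least incidence. The following self-contained sketch performs the same merge step with a plain linear minimum search instead of the priority queue; it only computes the resulting bit count and assumes all counts are non-zero:

#include <stddef.h>

static unsigned long long sketch_huffman_bits(unsigned long long *counts, size_t n)
{
  unsigned long long bits= 0;
  while (n > 1)
  {
    size_t i, a= 0, b= 1;
    if (counts[b] < counts[a]) { a= 1; b= 0; }
    for (i= 2; i < n; i++)                    /* find the two smallest counts */
    {
      if (counts[i] < counts[a]) { b= a; a= i; }
      else if (counts[i] < counts[b]) b= i;
    }
    counts[a]+= counts[b];                    /* merge the two least frequent */
    bits+= counts[a];                         /* every merge adds one bit per occurrence */
    counts[b]= counts[--n];                   /* forget the merged-away element */
  }
  return bits;
}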
......@@ -1309,7 +1656,26 @@ static int compare_tree(void* cmp_arg __attribute__((unused)),
return 0;
}
/* Used by make_huff_tree to save intervall-counts in queue */
/*
Organize distinct column values and their incidences into a priority queue.
SYNOPSIS
save_counts_in_queue()
key The column value.
count The incidence of this value.
tree The Huffman tree to be built later.
DESCRIPTION
We use the element buffer of the targeted tree. The distinct column
values are organized in a priority queue first. The Huffman
algorithm will later organize the elements into a Huffman tree. For
the time being, we just place references to the elements into the
queue buffer. The buffer will later be organized into a priority
queue.
RETURN
0
*/
static int save_counts_in_queue(byte *key, element_count count,
HUFF_TREE *tree)
......@@ -1326,8 +1692,23 @@ static int save_counts_in_queue(byte *key, element_count count,
}
/* Calculate length of file if given counts should be used */
/* Its actually a faster version of make_huff_tree */
/*
Calculate length of file if given counts should be used.
SYNOPSIS
calc_packed_length()
huff_counts The counts for a column of the table(s).
add_tree_lenght Whether the decode tree length should be added.
DESCRIPTION
We need to follow the Huffman algorithm until we know how many bits
are required for each byte code. But we do not need the resulting
Huffman tree. Hence, we can leave out some steps which are essential
in make_huff_tree().
RETURN
Number of bytes required to compress this table column.
*/
static my_off_t calc_packed_length(HUFF_COUNTS *huff_counts,
uint add_tree_lenght)
......@@ -1337,6 +1718,23 @@ static my_off_t calc_packed_length(HUFF_COUNTS *huff_counts,
HUFF_ELEMENT element_buffer[256];
DBUG_ENTER("calc_packed_length");
/*
WARNING: We use a small hack for efficiency: Instead of placing
references to HUFF_ELEMENTs into the queue, we just insert
references to the counts of the byte codes which appeared in this
table column. During the Huffman algorithm they are successively
replaced by references to HUFF_ELEMENTs. This works, because
HUFF_ELEMENTs have the incidence count at their beginning.
Regardless of whether the queue array contains references to counts
of type my_off_t or references to HUFF_ELEMENTs, which have the count
of type my_off_t at their beginning, each entry always points to a
count of the same type.
Instead of using queue_insert(), we just copy the references into
the buffer of the priority queue. We insert in byte value order, but
the order is in fact irrelevant here. We will establish the correct
order later.
*/
first=last=0;
for (i=found=0 ; i < 256 ; i++)
{
......@@ -1345,31 +1743,73 @@ static my_off_t calc_packed_length(HUFF_COUNTS *huff_counts,
if (! found++)
first=i;
last=i;
/* We start with root[1], which is the queues top element. */
queue.root[found]=(byte*) &huff_counts->counts[i];
}
}
if (!found)
DBUG_RETURN(0); /* Empty tree */
/*
If there is only a single byte value in this field in all records,
add a second element with zero incidence. This is required to enter
the loop that follows the Huffman algorithm.
*/
if (found < 2)
queue.root[++found]=(byte*) &huff_counts->counts[last ? 0 : 1];
/* Make a queue from the queue buffer. */
queue.elements=found;
bytes_packed=0; bits_packed=0;
/* Add the length of the coding table, which would become part of the file. */
if (add_tree_lenght)
bytes_packed=(8+9+5+5+(max_bit(last-first)+1)*found+
(max_bit(found-1)+1+1)*(found-2) +7)/8;
/*
Make a priority queue from the queue. Construct its index so that we
have a partially ordered tree.
*/
for (i=(found+1)/2 ; i > 0 ; i--)
_downheap(&queue,i);
/* The Huffman algorithm. */
for (i=0 ; i < found-1 ; i++)
{
HUFF_ELEMENT *a,*b,*new_huff_el;
a=(HUFF_ELEMENT*) queue_remove(&queue,0);
b=(HUFF_ELEMENT*) queue.root[1];
new_huff_el=element_buffer+i;
new_huff_el->count=a->count+b->count;
my_off_t *a;
my_off_t *b;
HUFF_ELEMENT *new_huff_el;
/*
Pop the top element from the queue (the one with the least
incidence). Popping from a priority queue includes a re-ordering
of the queue, to get the next least incidence element to the top.
*/
a= (my_off_t*) queue_remove(&queue, 0);
/*
Copy the next least incidence element. The queue implementation
reserves root[0] for temporary purposes. root[1] is the top.
*/
b= (my_off_t*) queue.root[1];
/* Create a new element in a local (automatic) buffer. */
new_huff_el= element_buffer + i;
/* The new element gets the sum of the two least incidence elements. */
new_huff_el->count= *a + *b;
/*
The Huffman algorithm assigns another bit to the code for a byte
every time that byte's incidence is combined (directly or indirectly)
into a new element as one of the two least incidence elements.
This means that one more bit per incidence of that byte is required
in the resulting file. So we add the new combined incidence as the
number of bits by which the result grows.
*/
bits_packed+=(uint) (new_huff_el->count & 7);
bytes_packed+=new_huff_el->count/8;
/*
Replace the copied top element by the new element and re-order the
queue. This successively replaces the references to counts by
references to HUFF_ELEMENTs.
*/
queue.root[1]=(byte*) new_huff_el;
queue_replaced(&queue);
}
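The trick described in the WARNING above only works because 'count' is the first member of HUFF_ELEMENT, so a pointer to an element may be read as a pointer to its count and vice versa. A tiny illustration of that layout assumption (the struct and names here are invented for the example):

#include <assert.h>
#include <stddef.h>

struct sketch_element
{
  unsigned long long count;     /* must stay first, like HUFF_ELEMENT's count */
  void *left, *right;
};

static void sketch_layout_check(void)
{
  struct sketch_element el= { 42, 0, 0 };
  /* The queue may hold either kind of pointer because the count sits
     at offset 0 in both interpretations. */
  assert(offsetof(struct sketch_element, count) == 0);
  assert(*(unsigned long long*) &el == el.count);
}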
......@@ -1417,13 +1857,26 @@ static uint join_same_trees(HUFF_COUNTS *huff_counts, uint trees)
}
}
}
DBUG_PRINT("info", ("Original trees: %d After join: %d",
trees, tree_number));
if (verbose)
printf("Original trees: %d After join: %d\n",trees,tree_number);
VOID(printf("Original trees: %d After join: %d\n", trees, tree_number));
return tree_number; /* Return trees left */
}
/* Fill in huff_tree decode tables */
/*
Fill in huff_tree encode tables.
SYNOPSIS
make_huff_decode_table()
huff_tree An array of HUFF_TREE which are to be encoded.
trees The number of HUFF_TREE in the array.
RETURN
0 success
!= 0 error
*/
static int make_huff_decode_table(HUFF_TREE *huff_tree, uint trees)
{
......@@ -1434,12 +1887,13 @@ static int make_huff_decode_table(HUFF_TREE *huff_tree, uint trees)
{
elements=huff_tree->counts->tree_buff ? huff_tree->elements : 256;
if (!(huff_tree->code =
(ulong*) my_malloc(elements*
(sizeof(ulong)+sizeof(uchar)),
MYF(MY_WME | MY_ZEROFILL))))
(ulonglong*) my_malloc(elements*
(sizeof(ulonglong) + sizeof(uchar)),
MYF(MY_WME | MY_ZEROFILL))))
return 1;
huff_tree->code_len=(uchar*) (huff_tree->code+elements);
make_traverse_code_tree(huff_tree,huff_tree->root,32,0);
make_traverse_code_tree(huff_tree, huff_tree->root,
8 * sizeof(ulonglong), LL(0));
}
}
return 0;
......@@ -1448,28 +1902,90 @@ static int make_huff_decode_table(HUFF_TREE *huff_tree, uint trees)
static void make_traverse_code_tree(HUFF_TREE *huff_tree,
HUFF_ELEMENT *element,
uint size, ulong code)
uint size, ulonglong code)
{
uint chr;
if (!element->a.leaf.null)
{
chr=element->a.leaf.element_nr;
huff_tree->code_len[chr]=(uchar) (32-size);
huff_tree->code[chr]= (code >> size);
if (huff_tree->height < 32-size)
huff_tree->height= 32-size;
huff_tree->code_len[chr]= (uchar) (8 * sizeof(ulonglong) - size);
huff_tree->code[chr]= (code >> size);
if (huff_tree->height < 8 * sizeof(ulonglong) - size)
huff_tree->height= 8 * sizeof(ulonglong) - size;
}
else
{
size--;
make_traverse_code_tree(huff_tree,element->a.nod.left,size,code);
make_traverse_code_tree(huff_tree,element->a.nod.right,size,
code+((ulong) 1L << size));
make_traverse_code_tree(huff_tree, element->a.nod.right, size,
code + (((ulonglong) 1) << size));
}
return;
}
/*
Convert a value into binary digits.
SYNOPSIS
bindigits()
value The value.
bits The number of low order bits to convert.
NOTE
The result string is in static storage. It is reused on every call.
So you cannot use it twice in one expression.
RETURN
A pointer to a static NUL-terminated string.
*/
static char *bindigits(ulonglong value, uint bits)
{
static char digits[72];
char *ptr= digits;
uint idx= bits;
DBUG_ASSERT(idx < sizeof(digits));
while (idx)
*(ptr++)= '0' + ((value >> (--idx)) & 1);
*ptr= '\0';
return digits;
}
/*
Convert a value into hexadecimal digits.
SYNOPSIS
hexdigits()
value The value.
NOTE
The result string is in static storage. It is reused on every call.
So you cannot use it twice in one expression.
RETURN
A pointer to a static NUL-terminated string.
*/
static char *hexdigits(ulonglong value)
{
static char digits[20];
char *ptr= digits;
uint idx= 2 * sizeof(value); /* Two hex digits per byte. */
DBUG_ASSERT(idx < sizeof(digits));
while (idx)
{
if ((*(ptr++)= '0' + ((value >> (4 * (--idx))) & 0xf)) > '9')
*(ptr - 1)+= 'a' - '9' - 1;
}
*ptr= '\0';
return digits;
}
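A usage sketch (not part of the patch): because both helpers return a static buffer, they must not be used twice in one expression, so two separate calls are needed. With the definitions above this would be expected to print 16 hex digits and 8 binary digits:

/* Expected output:
     code: 0x000000000000002a
     bin:  00101010
*/
VOID(printf("code: 0x%s\n", hexdigits((ulonglong) 0x2a)));
VOID(printf("bin:  %s\n", bindigits((ulonglong) 0x2a, 8)));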
/* Write header to new packed data file */
static int write_header(PACK_MRG_INFO *mrg,uint head_length,uint trees,
......@@ -1503,15 +2019,64 @@ static void write_field_info(HUFF_COUNTS *counts, uint fields, uint trees)
uint huff_tree_bits;
huff_tree_bits=max_bit(trees ? trees-1 : 0);
DBUG_PRINT("info", (""));
DBUG_PRINT("info", ("column types:"));
DBUG_PRINT("info", ("FIELD_NORMAL 0"));
DBUG_PRINT("info", ("FIELD_SKIP_ENDSPACE 1"));
DBUG_PRINT("info", ("FIELD_SKIP_PRESPACE 2"));
DBUG_PRINT("info", ("FIELD_SKIP_ZERO 3"));
DBUG_PRINT("info", ("FIELD_BLOB 4"));
DBUG_PRINT("info", ("FIELD_CONSTANT 5"));
DBUG_PRINT("info", ("FIELD_INTERVALL 6"));
DBUG_PRINT("info", ("FIELD_ZERO 7"));
DBUG_PRINT("info", ("FIELD_VARCHAR 8"));
DBUG_PRINT("info", ("FIELD_CHECK 9"));
DBUG_PRINT("info", (""));
DBUG_PRINT("info", ("pack type as a set of flags:"));
DBUG_PRINT("info", ("PACK_TYPE_SELECTED 1"));
DBUG_PRINT("info", ("PACK_TYPE_SPACE_FIELDS 2"));
DBUG_PRINT("info", ("PACK_TYPE_ZERO_FILL 4"));
DBUG_PRINT("info", (""));
if (verbose >= 2)
{
VOID(printf("\n"));
VOID(printf("column types:\n"));
VOID(printf("FIELD_NORMAL 0\n"));
VOID(printf("FIELD_SKIP_ENDSPACE 1\n"));
VOID(printf("FIELD_SKIP_PRESPACE 2\n"));
VOID(printf("FIELD_SKIP_ZERO 3\n"));
VOID(printf("FIELD_BLOB 4\n"));
VOID(printf("FIELD_CONSTANT 5\n"));
VOID(printf("FIELD_INTERVALL 6\n"));
VOID(printf("FIELD_ZERO 7\n"));
VOID(printf("FIELD_VARCHAR 8\n"));
VOID(printf("FIELD_CHECK 9\n"));
VOID(printf("\n"));
VOID(printf("pack type as a set of flags:\n"));
VOID(printf("PACK_TYPE_SELECTED 1\n"));
VOID(printf("PACK_TYPE_SPACE_FIELDS 2\n"));
VOID(printf("PACK_TYPE_ZERO_FILL 4\n"));
VOID(printf("\n"));
}
for (i=0 ; i++ < fields ; counts++)
{
write_bits((ulong) (int) counts->field_type,5);
write_bits((ulonglong) (int) counts->field_type, 5);
write_bits(counts->pack_type,6);
if (counts->pack_type & PACK_TYPE_ZERO_FILL)
write_bits(counts->max_zero_fill,5);
else
write_bits(counts->length_bits,5);
write_bits((ulong) counts->tree->tree_number-1,huff_tree_bits);
write_bits((ulonglong) counts->tree->tree_number - 1, huff_tree_bits);
DBUG_PRINT("info", ("column: %3u type: %2u pack: %2u zero: %4u "
"lbits: %2u tree: %2u length: %4u",
i , counts->field_type, counts->pack_type,
counts->max_zero_fill, counts->length_bits,
counts->tree->tree_number, counts->field_length));
if (verbose >= 2)
VOID(printf("column: %3u type: %2u pack: %2u zero: %4u lbits: %2u "
"tree: %2u length: %4u\n", i , counts->field_type,
counts->pack_type, counts->max_zero_fill, counts->length_bits,
counts->tree->tree_number, counts->field_length));
}
flush_bits();
return;
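For a quick sanity check of the header size: each column writes 5 + 6 + 5 + huff_tree_bits bits before flush_bits() pads to a byte boundary. A small sketch of that budget, assuming max_bit(n) returns the number of bits needed to represent n (as the code above relies on):

static unsigned sketch_field_info_bits(unsigned trees)
{
  unsigned huff_tree_bits= 0, n= trees ? trees - 1 : 0;
  do { huff_tree_bits++; } while (n>>= 1);    /* same idea as max_bit() */
  return 5 + 6 + 5 + huff_tree_bits;          /* type + pack + zero/lbits + tree number */
}
/* sketch_field_info_bits(3) == 18 bits per column */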
......@@ -1524,45 +2089,72 @@ static void write_field_info(HUFF_COUNTS *counts, uint fields, uint trees)
static my_off_t write_huff_tree(HUFF_TREE *huff_tree, uint trees)
{
uint i,int_length;
uint tree_no;
uint codes;
uint errors= 0;
uint *packed_tree,*offset,length;
my_off_t elements;
/* Find the highest number of elements in the trees. */
for (i=length=0 ; i < trees ; i++)
if (huff_tree[i].tree_number > 0 && huff_tree[i].elements > length)
length=huff_tree[i].elements;
/*
Allocate a buffer for packing a decode tree. Two numbers per element
(left child and right child).
*/
if (!(packed_tree=(uint*) my_alloca(sizeof(uint)*length*2)))
{
my_error(EE_OUTOFMEMORY,MYF(ME_BELL),sizeof(uint)*length*2);
return 0;
}
DBUG_PRINT("info", (""));
if (verbose >= 2)
VOID(printf("\n"));
tree_no= 0;
intervall_length=0;
for (elements=0; trees-- ; huff_tree++)
{
/* Skip columns that have been joined with other columns. */
if (huff_tree->tree_number == 0)
continue; /* Deleted tree */
tree_no++;
DBUG_PRINT("info", (""));
if (verbose >= 3)
VOID(printf("\n"));
/* Count the total number of elements (byte codes or column values). */
elements+=huff_tree->elements;
huff_tree->max_offset=2;
/* Build a tree of offsets and codes for decoding in 'packed_tree'. */
if (huff_tree->elements <= 1)
offset=packed_tree;
else
offset=make_offset_code_tree(huff_tree,huff_tree->root,packed_tree);
/* This should be the same as 'length' above. */
huff_tree->offset_bits=max_bit(huff_tree->max_offset);
/*
Since we check this while collecting the distinct column values,
this should never happen.
*/
if (huff_tree->max_offset >= IS_OFFSET)
{ /* This should be impossible */
VOID(fprintf(stderr,"Tree offset got too big: %d, aborted\n",
huff_tree->max_offset));
VOID(fprintf(stderr, "Tree offset got too big: %d, aborted\n",
huff_tree->max_offset));
my_afree((gptr) packed_tree);
return 0;
}
#ifdef EXTRA_DBUG
printf("pos: %d elements: %d tree-elements: %d char_bits: %d\n",
(uint) (file_buffer.pos-file_buffer.buffer),
huff_tree->elements, (offset-packed_tree),huff_tree->char_bits);
#endif
DBUG_PRINT("info", ("pos: %lu elements: %u tree-elements: %lu "
"char_bits: %u\n",
(ulong) (file_buffer.pos - file_buffer.buffer),
huff_tree->elements, (ulong) (offset - packed_tree),
huff_tree->char_bits));
if (!huff_tree->counts->tree_buff)
{
/* We do a byte compression on this column. Mark with bit 0. */
write_bits(0,1);
write_bits(huff_tree->min_chr,8);
write_bits(huff_tree->elements,9);
......@@ -1574,6 +2166,7 @@ static my_off_t write_huff_tree(HUFF_TREE *huff_tree, uint trees)
{
int_length=(uint) (huff_tree->counts->tree_pos -
huff_tree->counts->tree_buff);
/* We have distinct column values for this column. Mark with bit 1. */
write_bits(1,1);
write_bits(huff_tree->elements,15);
write_bits(int_length,16);
......@@ -1581,10 +2174,29 @@ static my_off_t write_huff_tree(HUFF_TREE *huff_tree, uint trees)
write_bits(huff_tree->offset_bits,5);
intervall_length+=int_length;
}
DBUG_PRINT("info", ("tree: %2u elements: %4u char_bits: %2u "
"offset_bits: %2u %s: %5u codelen: %2u",
tree_no, huff_tree->elements, huff_tree->char_bits,
huff_tree->offset_bits, huff_tree->counts->tree_buff ?
"bufflen" : "min_chr", huff_tree->counts->tree_buff ?
int_length : huff_tree->min_chr, huff_tree->height));
if (verbose >= 2)
VOID(printf("tree: %2u elements: %4u char_bits: %2u offset_bits: %2u "
"%s: %5u codelen: %2u\n", tree_no, huff_tree->elements,
huff_tree->char_bits, huff_tree->offset_bits,
huff_tree->counts->tree_buff ? "bufflen" : "min_chr",
huff_tree->counts->tree_buff ? int_length :
huff_tree->min_chr, huff_tree->height));
/* Check that the code tree length matches the element count. */
length=(uint) (offset-packed_tree);
if (length != huff_tree->elements*2-2)
printf("error: Huff-tree-length: %d != calc_length: %d\n",
length,huff_tree->elements*2-2);
{
VOID(fprintf(stderr, "error: Huff-tree-length: %d != calc_length: %d\n",
length, huff_tree->elements * 2 - 2));
errors++;
break;
}
for (i=0 ; i < length ; i++)
{
......@@ -1593,16 +2205,122 @@ static my_off_t write_huff_tree(HUFF_TREE *huff_tree, uint trees)
huff_tree->offset_bits+1);
else
write_bits(packed_tree[i]-huff_tree->min_chr,huff_tree->char_bits+1);
DBUG_PRINT("info", ("tree[0x%04x]: %s0x%04x",
i, (packed_tree[i] & IS_OFFSET) ?
" -> " : "", (packed_tree[i] & IS_OFFSET) ?
packed_tree[i] - IS_OFFSET + i : packed_tree[i]));
if (verbose >= 3)
VOID(printf("tree[0x%04x]: %s0x%04x\n",
i, (packed_tree[i] & IS_OFFSET) ? " -> " : "",
(packed_tree[i] & IS_OFFSET) ?
packed_tree[i] - IS_OFFSET + i : packed_tree[i]));
}
flush_bits();
/*
Display coding tables and check their correctness.
*/
codes= huff_tree->counts->tree_buff ? huff_tree->elements : 256;
for (i= 0; i < codes; i++)
{
ulonglong code;
uint bits;
uint len;
uint idx;
if (! (len= huff_tree->code_len[i]))
continue;
DBUG_PRINT("info", ("code[0x%04x]: 0x%s bits: %2u bin: %s", i,
hexdigits(huff_tree->code[i]), huff_tree->code_len[i],
bindigits(huff_tree->code[i],
huff_tree->code_len[i])));
if (verbose >= 3)
VOID(printf("code[0x%04x]: 0x%s bits: %2u bin: %s\n", i,
hexdigits(huff_tree->code[i]), huff_tree->code_len[i],
bindigits(huff_tree->code[i], huff_tree->code_len[i])));
/* Check that the encode table decodes correctly. */
code= 0;
bits= 0;
idx= 0;
DBUG_EXECUTE_IF("forcechkerr1", len--;);
DBUG_EXECUTE_IF("forcechkerr2", bits= 8 * sizeof(code););
DBUG_EXECUTE_IF("forcechkerr3", idx= length;);
for (;;)
{
if (! len)
{
VOID(fflush(stdout));
VOID(fprintf(stderr, "error: code 0x%s with %u bits not found\n",
hexdigits(huff_tree->code[i]), huff_tree->code_len[i]));
errors++;
break;
}
code<<= 1;
code|= (huff_tree->code[i] >> (--len)) & 1;
bits++;
if (bits > 8 * sizeof(code))
{
VOID(fflush(stdout));
VOID(fprintf(stderr, "error: Huffman code too long: %u/%u\n",
bits, 8 * sizeof(code)));
errors++;
break;
}
idx+= code & 1;
if (idx >= length)
{
VOID(fflush(stdout));
VOID(fprintf(stderr, "error: illegal tree offset: %u/%u\n",
idx, length));
errors++;
break;
}
if (packed_tree[idx] & IS_OFFSET)
idx+= packed_tree[idx] & ~IS_OFFSET;
else
break; /* Hit a leaf. This contains the result value. */
}
if (errors)
break;
DBUG_EXECUTE_IF("forcechkerr4", packed_tree[idx]++;);
if (packed_tree[idx] != i)
{
VOID(fflush(stdout));
VOID(fprintf(stderr, "error: decoded value 0x%04x should be: 0x%04x\n",
packed_tree[idx], i));
errors++;
break;
}
} /*end for (codes)*/
if (errors)
break;
/* Write column values in case of distinct column value compression. */
if (huff_tree->counts->tree_buff)
{
for (i=0 ; i < int_length ; i++)
write_bits((uint) (uchar) huff_tree->counts->tree_buff[i],8);
{
write_bits((ulonglong) (uchar) huff_tree->counts->tree_buff[i], 8);
DBUG_PRINT("info", ("column_values[0x%04x]: 0x%02x",
i, (uchar) huff_tree->counts->tree_buff[i]));
if (verbose >= 3)
VOID(printf("column_values[0x%04x]: 0x%02x\n",
i, (uchar) huff_tree->counts->tree_buff[i]));
}
}
flush_bits();
}
DBUG_PRINT("info", (""));
if (verbose >= 2)
VOID(printf("\n"));
my_afree((gptr) packed_tree);
if (errors)
{
VOID(fprintf(stderr, "Error: Generated decode trees are corrupt. Stop.\n"));
return 0;
}
return elements;
}
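To make the packed_tree layout concrete, here is a tiny hand-worked example (invented, not produced by the code): three byte values 'A', 'B' and 'C' with Huffman codes 0, 10 and 11 pack into two node pairs, and decoding walks them exactly like the check loop above:

static const unsigned sketch_tree[4]=
{
  'A',              /* root, left child: leaf 'A' (code 0)          */
  IS_OFFSET + 1,    /* root, right child: node pair one entry ahead */
  'B',              /* that pair, left: leaf 'B' (code 10)          */
  'C'               /* that pair, right: leaf 'C' (code 11)         */
};

/* Decode one code, most significant bit first. */
static unsigned sketch_decode(const unsigned *tree, unsigned code, unsigned len)
{
  unsigned idx= 0;
  while (len--)
  {
    idx+= (code >> len) & 1;                  /* go left (0) or right (1) */
    if (tree[idx] & IS_OFFSET)
      idx+= tree[idx] & ~IS_OFFSET;           /* follow the offset to a node pair */
    else
      break;                                  /* leaf: tree[idx] is the value */
  }
  return tree[idx];
}
/* sketch_decode(sketch_tree, 0, 1) == 'A', sketch_decode(sketch_tree, 3, 2) == 'C' */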
......@@ -1613,23 +2331,43 @@ static uint *make_offset_code_tree(HUFF_TREE *huff_tree, HUFF_ELEMENT *element,
uint *prev_offset;
prev_offset= offset;
/*
'a.leaf.null' takes the same place as 'a.nod.left'. If this is null,
then there is no left child and hence no right child either. This
is a property of a binary tree. An element is either a node with two
children, or a leaf without children.
The current element is always a node with two children. Go left first.
*/
if (!element->a.nod.left->a.leaf.null)
{
offset[0] =(uint) element->a.nod.left->a.leaf.element_nr;
/* Store the byte code or the index of the column value. */
prev_offset[0] =(uint) element->a.nod.left->a.leaf.element_nr;
offset+=2;
}
else
{
/*
Recursively traverse the tree to the left. Mark it as an offset to
another tree node (in contrast to a byte code or column value index).
*/
prev_offset[0]= IS_OFFSET+2;
offset=make_offset_code_tree(huff_tree,element->a.nod.left,offset+2);
}
/* Now, check the right child. */
if (!element->a.nod.right->a.leaf.null)
{
/* Store the byte code or the index of the column value. */
prev_offset[1]=element->a.nod.right->a.leaf.element_nr;
return offset;
}
else
{
/*
Recursively traverse the tree to the right. Mark it as an offset to
another tree node (in contrast to a byte code or column value index).
*/
uint temp=(uint) (offset-prev_offset-1);
prev_offset[1]= IS_OFFSET+ temp;
if (huff_tree->max_offset < temp)
......@@ -1656,6 +2394,7 @@ static int compress_isam_file(PACK_MRG_INFO *mrg, HUFF_COUNTS *huff_counts)
uint i,max_calc_length,pack_ref_length,min_record_length,max_record_length,
intervall,field_length,max_pack_length,pack_blob_length;
my_off_t record_count;
char llbuf[32];
ulong length,pack_length;
byte *record,*pos,*end_pos,*record_pos,*start_pos;
HUFF_COUNTS *count,*end_count;
......@@ -1663,12 +2402,23 @@ static int compress_isam_file(PACK_MRG_INFO *mrg, HUFF_COUNTS *huff_counts)
MI_INFO *isam_file=mrg->file[0];
DBUG_ENTER("compress_isam_file");
/* Allocate a buffer for the records (excluding blobs). */
if (!(record=(byte*) my_alloca(isam_file->s->base.reclength)))
return -1;
end_count=huff_counts+isam_file->s->base.fields;
min_record_length= (uint) ~0;
max_record_length=0;
/*
Calculate the maximum number of bits required to pack the records.
Remember to understand 'max_zero_fill' as 'min_zero_fill'.
The tree height determines the maximum number of bits per value.
Some fields skip leading or trailing spaces or zeroes. The skipped
number of bytes is encoded by 'length_bits' bits.
Empty blobs and varchar are encoded with a single 1 bit. Other blobs
and varchar get a leading 0 bit.
*/
for (i=max_calc_length=0 ; i < isam_file->s->base.fields ; i++)
{
if (!(huff_counts[i].pack_type & PACK_TYPE_ZERO_FILL))
......@@ -1687,14 +2437,16 @@ static int compress_isam_file(PACK_MRG_INFO *mrg, HUFF_COUNTS *huff_counts)
(huff_counts[i].field_length - huff_counts[i].max_zero_fill)*
huff_counts[i].tree->height+huff_counts[i].length_bits;
}
max_calc_length/=8;
max_calc_length= (max_calc_length + 7) / 8;
if (max_calc_length < 254)
pack_ref_length=1;
else if (max_calc_length <= 65535)
pack_ref_length=3;
else
pack_ref_length=4;
record_count=0;
/* 'max_blob_length' is the max length of all blobs of a record. */
pack_blob_length=0;
if (isam_file->s->base.blobs)
{
......@@ -1707,6 +2459,7 @@ static int compress_isam_file(PACK_MRG_INFO *mrg, HUFF_COUNTS *huff_counts)
}
max_pack_length=pack_ref_length+pack_blob_length;
DBUG_PRINT("fields", ("==="));
mrg_reset(mrg);
while ((error=mrg_rrnd(mrg,record)) != HA_ERR_END_OF_FILE)
{
......@@ -1722,15 +2475,29 @@ static int compress_isam_file(PACK_MRG_INFO *mrg, HUFF_COUNTS *huff_counts)
end_pos=start_pos+(field_length=count->field_length);
tree=count->tree;
DBUG_PRINT("fields", ("column: %3lu type: %2u pack: %2u zero: %4u "
"lbits: %2u tree: %2u length: %4u",
(ulong) (count - huff_counts + 1),
count->field_type,
count->pack_type, count->max_zero_fill,
count->length_bits, count->tree->tree_number,
count->field_length));
/* Check if the column contains spaces only. */
if (count->pack_type & PACK_TYPE_SPACE_FIELDS)
{
for (pos=start_pos ; *pos == ' ' && pos < end_pos; pos++) ;
if (pos == end_pos)
{
DBUG_PRINT("fields",
("PACK_TYPE_SPACE_FIELDS spaces only, bits: 1"));
DBUG_PRINT("fields", ("---"));
write_bits(1,1);
start_pos=end_pos;
continue;
}
DBUG_PRINT("fields",
("PACK_TYPE_SPACE_FIELDS not only spaces, bits: 1"));
write_bits(0,1);
}
end_pos-=count->max_zero_fill;
......@@ -1740,65 +2507,129 @@ static int compress_isam_file(PACK_MRG_INFO *mrg, HUFF_COUNTS *huff_counts)
case FIELD_SKIP_ZERO:
if (!memcmp((byte*) start_pos,zero_string,field_length))
{
DBUG_PRINT("fields", ("FIELD_SKIP_ZERO zeroes only, bits: 1"));
write_bits(1,1);
start_pos=end_pos;
break;
}
DBUG_PRINT("fields", ("FIELD_SKIP_ZERO not only zeroes, bits: 1"));
write_bits(0,1);
/* Fall through */
case FIELD_NORMAL:
DBUG_PRINT("fields", ("FIELD_NORMAL %lu bytes",
(ulong) (end_pos - start_pos)));
for ( ; start_pos < end_pos ; start_pos++)
{
DBUG_PRINT("fields",
("value: 0x%02x code: 0x%s bits: %2u bin: %s",
(uchar) *start_pos,
hexdigits(tree->code[(uchar) *start_pos]),
(uint) tree->code_len[(uchar) *start_pos],
bindigits(tree->code[(uchar) *start_pos],
(uint) tree->code_len[(uchar) *start_pos])));
write_bits(tree->code[(uchar) *start_pos],
(uint) tree->code_len[(uchar) *start_pos]);
}
break;
case FIELD_SKIP_ENDSPACE:
for (pos=end_pos ; pos > start_pos && pos[-1] == ' ' ; pos--) ;
length=(uint) (end_pos-pos);
length= (ulong) (end_pos - pos);
if (count->pack_type & PACK_TYPE_SELECTED)
{
if (length > count->min_space)
{
DBUG_PRINT("fields",
("FIELD_SKIP_ENDSPACE more than min_space, bits: 1"));
DBUG_PRINT("fields",
("FIELD_SKIP_ENDSPACE skip %lu/%u bytes, bits: %2u",
length, field_length, count->length_bits));
write_bits(1,1);
write_bits(length,count->length_bits);
}
else
{
DBUG_PRINT("fields",
("FIELD_SKIP_ENDSPACE not more than min_space, "
"bits: 1"));
write_bits(0,1);
pos=end_pos;
}
}
else
{
DBUG_PRINT("fields",
("FIELD_SKIP_ENDSPACE skip %lu/%u bytes, bits: %2u",
length, field_length, count->length_bits));
write_bits(length,count->length_bits);
}
/* Encode all significant bytes. */
DBUG_PRINT("fields", ("FIELD_SKIP_ENDSPACE %lu bytes",
(ulong) (pos - start_pos)));
for ( ; start_pos < pos ; start_pos++)
{
DBUG_PRINT("fields",
("value: 0x%02x code: 0x%s bits: %2u bin: %s",
(uchar) *start_pos,
hexdigits(tree->code[(uchar) *start_pos]),
(uint) tree->code_len[(uchar) *start_pos],
bindigits(tree->code[(uchar) *start_pos],
(uint) tree->code_len[(uchar) *start_pos])));
write_bits(tree->code[(uchar) *start_pos],
(uint) tree->code_len[(uchar) *start_pos]);
}
start_pos=end_pos;
break;
case FIELD_SKIP_PRESPACE:
for (pos=start_pos ; pos < end_pos && pos[0] == ' ' ; pos++) ;
length=(uint) (pos-start_pos);
length= (ulong) (pos - start_pos);
if (count->pack_type & PACK_TYPE_SELECTED)
{
if (length > count->min_space)
{
DBUG_PRINT("fields",
("FIELD_SKIP_PRESPACE more than min_space, bits: 1"));
DBUG_PRINT("fields",
("FIELD_SKIP_PRESPACE skip %lu/%u bytes, bits: %2u",
length, field_length, count->length_bits));
write_bits(1,1);
write_bits(length,count->length_bits);
}
else
{
DBUG_PRINT("fields",
("FIELD_SKIP_PRESPACE not more than min_space, "
"bits: 1"));
pos=start_pos;
write_bits(0,1);
}
}
else
{
DBUG_PRINT("fields",
("FIELD_SKIP_PRESPACE skip %lu/%u bytes, bits: %2u",
length, field_length, count->length_bits));
write_bits(length,count->length_bits);
}
/* Encode all significant bytes. */
DBUG_PRINT("fields", ("FIELD_SKIP_PRESPACE %lu bytes",
(ulong) (end_pos - start_pos)));
for (start_pos=pos ; start_pos < end_pos ; start_pos++)
{
DBUG_PRINT("fields",
("value: 0x%02x code: 0x%s bits: %2u bin: %s",
(uchar) *start_pos,
hexdigits(tree->code[(uchar) *start_pos]),
(uint) tree->code_len[(uchar) *start_pos],
bindigits(tree->code[(uchar) *start_pos],
(uint) tree->code_len[(uchar) *start_pos])));
write_bits(tree->code[(uchar) *start_pos],
(uint) tree->code_len[(uchar) *start_pos]);
}
break;
case FIELD_CONSTANT:
case FIELD_ZERO:
case FIELD_CHECK:
DBUG_PRINT("fields", ("FIELD_CONSTANT/ZERO/CHECK"));
start_pos=end_pos;
break;
case FIELD_INTERVALL:
......@@ -1806,6 +2637,10 @@ static int compress_isam_file(PACK_MRG_INFO *mrg, HUFF_COUNTS *huff_counts)
pos=(byte*) tree_search(&count->int_tree, start_pos,
count->int_tree.custom_arg);
intervall=(uint) (pos - count->tree_buff)/field_length;
DBUG_PRINT("fields", ("FIELD_INTERVALL"));
DBUG_PRINT("fields", ("index: %4u code: 0x%s bits: %2u",
intervall, hexdigits(tree->code[intervall]),
(uint) tree->code_len[intervall]));
write_bits(tree->code[intervall],(uint) tree->code_len[intervall]);
start_pos=end_pos;
break;
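The FIELD_INTERVALL case encodes the index of the column value within tree_buff, where the distinct values sit back to back at field_length bytes each, so (pos - tree_buff) / field_length is that index. A small sketch with a linear search standing in for tree_search() (hypothetical helper, not the patch):

/* Editor's sketch, not part of the patch: index of a value in a buffer of
   fixed-width distinct values, as encoded by the FIELD_INTERVALL case. */
#include <stdio.h>
#include <string.h>

static int intervall_index(const char *tree_buff, unsigned int n_values,
                           unsigned int field_length, const char *value)
{
  unsigned int i;
  for (i = 0; i < n_values; i++)        /* the real code uses tree_search() */
    if (!memcmp(tree_buff + i * field_length, value, field_length))
      return (int) i;                   /* same as (pos - tree_buff) / field_length */
  return -1;
}

int main(void)
{
  static const char buff[] = "red  greenblue ";           /* three values, 5 bytes each */
  printf("%d\n", intervall_index(buff, 3, 5, "blue "));   /* prints 2 */
  return 0;
}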
......@@ -1814,21 +2649,36 @@ static int compress_isam_file(PACK_MRG_INFO *mrg, HUFF_COUNTS *huff_counts)
ulong blob_length=_mi_calc_blob_length(field_length-
mi_portable_sizeof_char_ptr,
start_pos);
/* Empty blobs are encoded with a single 1 bit. */
if (!blob_length)
{
write_bits(1,1); /* Empty blob */
DBUG_PRINT("fields", ("FIELD_BLOB empty, bits: 1"));
write_bits(1,1);
}
else
{
byte *blob,*blob_end;
DBUG_PRINT("fields", ("FIELD_BLOB not empty, bits: 1"));
write_bits(0,1);
/* Write the blob length. */
DBUG_PRINT("fields", ("FIELD_BLOB %lu bytes, bits: %2u",
blob_length, count->length_bits));
write_bits(blob_length,count->length_bits);
memcpy_fixed(&blob,end_pos-mi_portable_sizeof_char_ptr,
sizeof(char*));
blob_end=blob+blob_length;
/* Encode the blob bytes. */
for ( ; blob < blob_end ; blob++)
{
DBUG_PRINT("fields",
("value: 0x%02x code: 0x%s bits: %2u bin: %s",
(uchar) *blob, hexdigits(tree->code[(uchar) *blob]),
(uint) tree->code_len[(uchar) *blob],
bindigits(tree->code[(uchar) *blob],
          (uint) tree->code_len[(uchar) *blob])));
write_bits(tree->code[(uchar) *blob],
(uint) tree->code_len[(uchar) *blob]);
}
tot_blob_length+=blob_length;
}
start_pos= end_pos;
......@@ -1839,18 +2689,34 @@ static int compress_isam_file(PACK_MRG_INFO *mrg, HUFF_COUNTS *huff_counts)
uint pack_length= HA_VARCHAR_PACKLENGTH(count->field_length-1);
ulong col_length= (pack_length == 1 ? (uint) *(uchar*) start_pos :
uint2korr(start_pos));
/* An empty varchar is encoded with a single 1 bit. */
if (!col_length)
{
DBUG_PRINT("fields", ("FIELD_VARCHAR empty, bits: 1"));
write_bits(1,1); /* Empty varchar */
}
else
{
byte *end=start_pos+pack_length+col_length;
DBUG_PRINT("fields", ("FIELD_VARCHAR not empty, bits: 1"));
write_bits(0,1);
/* Write the varchar length. */
DBUG_PRINT("fields", ("FIELD_VARCHAR %lu bytes, bits: %2u",
col_length, count->length_bits));
write_bits(col_length,count->length_bits);
/* Encode the varchar bytes. */
for (start_pos+=pack_length ; start_pos < end ; start_pos++)
{
DBUG_PRINT("fields",
("value: 0x%02x code: 0x%s bits: %2u bin: %s",
(uchar) *start_pos,
hexdigits(tree->code[(uchar) *start_pos]),
(uint) tree->code_len[(uchar) *start_pos],
bindigits(tree->code[(uchar) *start_pos],
(uint)tree->code_len[(uchar) *start_pos])));
write_bits(tree->code[(uchar) *start_pos],
(uint) tree->code_len[(uchar) *start_pos]);
}
}
start_pos= end_pos;
break;
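In the FIELD_VARCHAR case the stored value begins with a length prefix of one byte, or two bytes in little-endian order for longer columns, which is what the pack_length/uint2korr() logic above resolves. A sketch of that prefix decoding with a hypothetical helper (not the MySQL macros):

/* Editor's sketch, not part of the patch: decode a 1- or 2-byte varchar
   length prefix; the 2-byte form is little-endian like uint2korr(). */
#include <stdio.h>

static unsigned long varchar_length(const unsigned char *start_pos,
                                    unsigned int pack_length)
{
  if (pack_length == 1)
    return start_pos[0];
  return (unsigned long) start_pos[0] | ((unsigned long) start_pos[1] << 8);
}

int main(void)
{
  const unsigned char one_byte[]  = { 7, 'e', 'x', 'a', 'm', 'p', 'l', 'e' };
  const unsigned char two_bytes[] = { 0x2c, 0x01 };      /* 0x012c == 300 */
  printf("%lu %lu\n", varchar_length(one_byte, 1),
         varchar_length(two_bytes, 2));                  /* prints 7 300 */
  return 0;
}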
......@@ -1859,12 +2725,17 @@ static int compress_isam_file(PACK_MRG_INFO *mrg, HUFF_COUNTS *huff_counts)
abort(); /* Impossible */
}
start_pos+=count->max_zero_fill;
DBUG_PRINT("fields", ("---"));
}
flush_bits();
length=(ulong) (file_buffer.pos-record_pos)-max_pack_length;
length=(ulong) ((byte*) file_buffer.pos - record_pos) - max_pack_length;
pack_length=save_pack_length(record_pos,length);
if (pack_blob_length)
pack_length+=save_pack_length(record_pos+pack_length,tot_blob_length);
DBUG_PRINT("fields", ("record: %lu length: %lu blob-length: %lu "
"length-bytes: %lu", (ulong) record_count, length,
tot_blob_length, pack_length));
DBUG_PRINT("fields", ("==="));
/* Correct file buffer if the header was smaller */
if (pack_length != max_pack_length)
......@@ -1876,9 +2747,11 @@ static int compress_isam_file(PACK_MRG_INFO *mrg, HUFF_COUNTS *huff_counts)
min_record_length=(uint) length;
if (length > (ulong) max_record_length)
max_record_length=(uint) length;
if (write_loop && ++record_count % WRITE_COUNT == 0)
record_count++;
if (write_loop && record_count % WRITE_COUNT == 0)
{
printf("%lu\r",(ulong) record_count); VOID(fflush(stdout));
VOID(printf("%lu\r", (ulong) record_count));
VOID(fflush(stdout));
}
}
else if (error != HA_ERR_RECORD_DELETED)
......@@ -1888,8 +2761,11 @@ static int compress_isam_file(PACK_MRG_INFO *mrg, HUFF_COUNTS *huff_counts)
error=0;
else
{
fprintf(stderr,"%s: Got error %d reading records\n",my_progname,error);
VOID(fprintf(stderr, "%s: Got error %d reading records\n",
my_progname, error));
}
if (verbose >= 2)
VOID(printf("wrote %s records.\n", llstr((longlong) record_count, llbuf)));
my_afree((gptr) record);
mrg->ref_length=max_pack_length;
......@@ -1929,7 +2805,7 @@ static void init_file_buffer(File file, pbool read_buffer)
file_buffer.pos=file_buffer.buffer;
file_buffer.bits=BITS_SAVED;
}
file_buffer.current_byte=0;
file_buffer.bitbucket= 0;
}
......@@ -1972,7 +2848,8 @@ static int flush_buffer(ulong neaded_length)
tmp=my_realloc(file_buffer.buffer, neaded_length,MYF(MY_WME));
if (!tmp)
return 1;
file_buffer.pos= tmp + (ulong) (file_buffer.pos - file_buffer.buffer);
file_buffer.pos= ((uchar*) tmp +
(ulong) (file_buffer.pos - file_buffer.buffer));
file_buffer.buffer=tmp;
file_buffer.end=tmp+neaded_length-8;
}
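One detail in this flush_buffer() hunk worth spelling out: my_realloc() may move the buffer, so the write cursor has to be rebased onto the new block by re-adding its old offset before the buffer pointer is overwritten. A generic sketch of that pattern using plain realloc() and hypothetical names:

/* Editor's sketch, not part of the patch: rebase a write cursor after
   realloc(), mirroring how file_buffer.pos is recomputed above. */
#include <stdio.h>
#include <stdlib.h>

struct grow_buf { unsigned char *buffer, *pos, *end; };

static int grow(struct grow_buf *b, size_t new_size)
{
  size_t offset = (size_t) (b->pos - b->buffer);  /* remember the offset first */
  unsigned char *tmp = realloc(b->buffer, new_size);
  if (!tmp)
    return 1;                                     /* old block is still valid */
  b->buffer = tmp;
  b->pos    = tmp + offset;                       /* rebase the write cursor  */
  b->end    = tmp + new_size;
  return 0;
}

int main(void)
{
  struct grow_buf b;
  b.buffer = b.pos = malloc(16);
  if (!b.buffer)
    return 1;
  b.end = b.buffer + 16;
  b.pos += 10;                                    /* pretend 10 bytes written */
  if (!grow(&b, 64))
    printf("offset kept: %ld\n", (long) (b.pos - b.buffer));   /* prints 10 */
  free(b.buffer);
  return 0;
}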
......@@ -1987,68 +2864,59 @@ static void end_file_buffer(void)
/* output `bits` low bits of `value' */
static void write_bits (register ulong value, register uint bits)
static void write_bits(register ulonglong value, register uint bits)
{
if ((file_buffer.bits-=(int) bits) >= 0)
DBUG_ASSERT(((bits < 8 * sizeof(value)) && ! (value >> bits)) ||
(bits == 8 * sizeof(value)));
if ((file_buffer.bits-= (int) bits) >= 0)
{
file_buffer.current_byte|=value << file_buffer.bits;
file_buffer.bitbucket|= value << file_buffer.bits;
}
else
{
reg3 uint byte_buff;
reg3 ulonglong bit_buffer;
bits= (uint) -file_buffer.bits;
DBUG_ASSERT(bits <= 8 * sizeof(value));
byte_buff= (file_buffer.current_byte |
((bits != 8 * sizeof(value)) ? (uint) (value >> bits) : 0));
#if BITS_SAVED == 32
*file_buffer.pos++= (byte) (byte_buff >> 24) ;
*file_buffer.pos++= (byte) (byte_buff >> 16) ;
bit_buffer= (file_buffer.bitbucket |
((bits != 8 * sizeof(value)) ? (value >> bits) : 0));
#if BITS_SAVED == 64
*file_buffer.pos++= (uchar) (bit_buffer >> 56);
*file_buffer.pos++= (uchar) (bit_buffer >> 48);
*file_buffer.pos++= (uchar) (bit_buffer >> 40);
*file_buffer.pos++= (uchar) (bit_buffer >> 32);
#endif
*file_buffer.pos++= (byte) (byte_buff >> 8) ;
*file_buffer.pos++= (byte) byte_buff;
*file_buffer.pos++= (uchar) (bit_buffer >> 24);
*file_buffer.pos++= (uchar) (bit_buffer >> 16);
*file_buffer.pos++= (uchar) (bit_buffer >> 8);
*file_buffer.pos++= (uchar) (bit_buffer);
DBUG_ASSERT(bits <= 8 * sizeof(ulong));
if (bits != 8 * sizeof(value))
value&= (((ulong) 1) << bits) - 1;
#if BITS_SAVED == 16
if (bits >= sizeof(uint))
{
bits-=8;
*file_buffer.pos++= (uchar) (value >> bits);
value&= (1 << bits)-1;
if (bits >= sizeof(uint))
{
bits-=8;
*file_buffer.pos++= (uchar) (value >> bits);
value&= (1 << bits)-1;
}
}
#endif
value&= (((ulonglong) 1) << bits) - 1;
if (file_buffer.pos >= file_buffer.end)
VOID(flush_buffer(~ (ulong) 0));
file_buffer.bits=(int) (BITS_SAVED - bits);
file_buffer.current_byte=(uint) (value << (BITS_SAVED - bits));
file_buffer.bitbucket= value << (BITS_SAVED - bits);
}
return;
}
/* Flush bits in bit_buffer to buffer */
static void flush_bits (void)
static void flush_bits(void)
{
uint bits,byte_buff;
int bits;
ulonglong bit_buffer;
bits=(file_buffer.bits) & ~7;
byte_buff = file_buffer.current_byte >> bits;
bits=BITS_SAVED - bits;
bits= file_buffer.bits & ~7;
bit_buffer= file_buffer.bitbucket >> bits;
bits= BITS_SAVED - bits;
while (bits > 0)
{
bits-=8;
*file_buffer.pos++= (byte) (uchar) (byte_buff >> bits) ;
bits-= 8;
*file_buffer.pos++= (uchar) (bit_buffer >> bits);
}
file_buffer.bits=BITS_SAVED;
file_buffer.current_byte=0;
return;
file_buffer.bits= BITS_SAVED;
file_buffer.bitbucket= 0;
}
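The core of this change is the move from a 16/32-bit accumulator to a 64-bit bitbucket, so that codes of up to 64 bits can be appended in a single write_bits() call and spilled to the file buffer eight bytes at a time, most significant byte first. The standalone sketch below mirrors that accumulator logic under hypothetical names (put_bits/put_flush, bytes printed instead of buffered) and is only an illustration, not the patched functions:

/* Editor's sketch, not part of the patch: a 64-bit, MSB-first bit
   accumulator in the spirit of write_bits()/flush_bits() above. */
#include <stdio.h>
#include <stdint.h>

static uint64_t bucket;          /* pending bits, left-aligned      */
static int      free_bits = 64;  /* unused low bits in 'bucket'     */

static void put_byte(uint8_t b) { printf("%02x ", b); }   /* stand-in for *pos++ */

static void put_bits(uint64_t value, unsigned int bits)   /* 1 <= bits <= 64 */
{
  if ((free_bits -= (int) bits) >= 0)
  {
    bucket |= value << free_bits;                 /* still fits in the bucket */
    return;
  }
  unsigned int spill = (unsigned int) -free_bits; /* bits that did not fit    */
  uint64_t full = bucket | ((spill == 64) ? 0 : (value >> spill));
  for (int shift = 56; shift >= 0; shift -= 8)    /* flush eight full bytes   */
    put_byte((uint8_t) (full >> shift));
  value &= (spill == 64) ? ~0ULL : ((1ULL << spill) - 1);
  free_bits = 64 - (int) spill;
  bucket = value << free_bits;                    /* keep the remainder       */
}

static void put_flush(void)
{
  int unused = free_bits & ~7;                    /* whole bytes never touched */
  for (int shift = 56; shift >= unused; shift -= 8)
    put_byte((uint8_t) (bucket >> shift));
  bucket = 0;
  free_bits = 64;
}

int main(void)
{
  put_bits(0x5, 3);                /* 101              */
  put_bits(0x1, 1);                /* 1                */
  put_bits(0xabcd, 16);            /* 16 more bits     */
  put_flush();                     /* prints ba bc d0  */
  putchar('\n');
  return 0;
}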
......@@ -2196,3 +3064,131 @@ static int mrg_close(PACK_MRG_INFO *mrg)
my_free((gptr) mrg->file,MYF(0));
return error;
}
#if !defined(DBUG_OFF)
/*
Fake the counts to get big Huffman codes.
SYNOPSIS
fakebigcodes()
huff_counts A pointer to the counts array.
end_count A pointer past the counts array.
DESCRIPTION
Huffman coding works by repeatedly removing the two least frequent
values from the list of values and adding a new value with the sum
of their incidences, until only one value is left. Every time a
value is merged into a new value, it gets one more bit for its
encoding. Hence, the least frequent values get the longest codes.
To get a maximum code length for a value, two of the values must
have an incidence of 1. As their sum is 2, the next least frequent
value must have an incidence of at least 2, then 4, 8, 16 and so on.
This means that one needs 2**n bytes (values) for a code length of
n bits. Conversely, using more distinct values either forces longer
codes or reaches the maximum code length with fewer total bytes
(values).
To get 64(32)-bit codes, I sort the counts by decreasing incidence.
I assign a count of 1 to the two most frequent values, a count of 2
to the next one, then 4, 8, and so on until 2**64-1(2**30-1). All
the remaining values get 1. That way every possible byte has an
assigned code, though not all codes are used if not all byte values
are present in the column.
This strategy would work with distinct column values too, but it
requires that at least 64(32) values are present. To make things
easier here, I cancel all distinct column values and force byte
compression for all columns.
RETURN
void
*/
static void fakebigcodes(HUFF_COUNTS *huff_counts, HUFF_COUNTS *end_count)
{
HUFF_COUNTS *count;
my_off_t *cur_count_p;
my_off_t *end_count_p;
my_off_t **cur_sort_p;
my_off_t **end_sort_p;
my_off_t *sort_counts[256];
my_off_t total;
DBUG_ENTER("fakebigcodes");
for (count= huff_counts; count < end_count; count++)
{
/*
Remove distinct column values.
*/
if (count->tree_buff)
{
my_free((gptr) count->tree_buff, MYF(0));
delete_tree(&count->int_tree);
count->tree_buff= NULL;
DBUG_PRINT("fakebigcodes", ("freed distinct column values"));
}
/*
Sort counts by decreasing incidence.
*/
cur_count_p= count->counts;
end_count_p= cur_count_p + 256;
cur_sort_p= sort_counts;
while (cur_count_p < end_count_p)
*(cur_sort_p++)= cur_count_p++;
(void) qsort(sort_counts, 256, sizeof(my_off_t*), (qsort_cmp) fakecmp);
/*
Assign faked counts.
*/
cur_sort_p= sort_counts;
#if SIZEOF_LONG_LONG > 4
end_sort_p= sort_counts + 8 * sizeof(ulonglong) - 1;
#else
end_sort_p= sort_counts + 8 * sizeof(ulonglong) - 2;
#endif
/* Most frequent value gets a faked count of 1. */
**(cur_sort_p++)= 1;
total= 1;
while (cur_sort_p < end_sort_p)
{
**(cur_sort_p++)= total;
total<<= 1;
}
/* Set the last value. */
**(cur_sort_p++)= --total;
/*
Set the remaining counts.
*/
end_sort_p= sort_counts + 256;
while (cur_sort_p < end_sort_p)
**(cur_sort_p++)= 1;
}
DBUG_VOID_RETURN;
}
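To see why the faked counts give maximum-length codes, consider eight symbols with counts 1, 1, 2, 4, 8, 16, 32, 64: every Huffman merge step pairs the running subtotal with the next count, so the two count-1 symbols gain one bit per step and end up with 7-bit codes. A tiny sketch of that reasoning (hypothetical, eight symbols instead of 256):

/* Editor's sketch, not part of the patch: why counts of 1, 1, 2, 4, ...
   force a degenerate Huffman tree with maximum-length codes. */
#include <stdio.h>

int main(void)
{
  unsigned long long counts[8] = { 1, 1, 2, 4, 8, 16, 32, 64 };
  unsigned long long subtotal  = counts[0] + counts[1];   /* first merge: 1 + 1 */
  unsigned int depth = 1;                                 /* bits for the rarest symbols */
  unsigned int i;

  /* Each step merges the running subtotal with the next count (they are
     always the two smallest weights), adding one bit to every symbol
     already merged, in particular to the two count-1 symbols. */
  for (i = 2; i < 8; i++)
  {
    subtotal += counts[i];
    depth++;
  }
  printf("total weight: %llu, longest code: %u bits\n", subtotal, depth);  /* 128, 7 */
  return 0;
}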
/*
Compare two counts for reverse sorting.
SYNOPSIS
fakecmp()
count1 One count.
count2 Another count.
RETURN
1 count1 < count2
0 count1 == count2
-1 count1 > count2
*/
static int fakecmp(my_off_t **count1, my_off_t **count2)
{
return ((**count1 < **count2) ? 1 :
(**count1 > **count2) ? -1 : 0);
}
#endif
......@@ -263,6 +263,9 @@ TREE_ELEMENT *tree_insert(TREE *tree, void *key, uint key_size,
if (tree->flag & TREE_NO_DUPS)
return(NULL);
element->count++;
/* Avoid a wrap-around of the count. */
if (! element->count)
element->count--;
}
DBUG_EXECUTE("check_tree", test_rb_tree(tree->root););
return element;
......
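The tree.c change above caps the duplicate counter instead of letting it wrap to zero: if the increment overflows, it is undone, so the count saturates at its maximum. A hedged sketch of the same pattern in isolation (hypothetical helper name):

/* Editor's sketch, not part of the patch: a saturating increment in the
   style of the element->count guard above. */
#include <stdio.h>

static unsigned int saturating_inc(unsigned int count)
{
  count++;
  if (!count)          /* wrapped past the maximum: undo the increment */
    count--;
  return count;
}

int main(void)
{
  printf("%u\n", saturating_inc(41u));    /* prints 42         */
  printf("%u\n", saturating_inc(~0u));    /* stays at UINT_MAX */
  return 0;
}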