Commit 3d952cd8 authored by Michael Okoko, committed by Sergei Petrunia

Improve tests and test results to cover larger cases

Signed-off-by: Michael Okoko <okokomichaels@outlook.com>
parent 63cbd074
......@@ -3,38 +3,47 @@
--echo # todo: should be merged with statistics_json.test
--echo #
set @save_histogram_type=@@histogram_type;
set @save_histogram_size=@@histogram_size;
create table ten(a int primary key);
insert into ten values (0),(1),(2),(3),(4),(5),(6),(7),(8),(9);
create table t1 (a varchar(255));
insert into t1 select concat('a-', a) from ten;
analyze table t1 persistent for all;
select * from mysql.column_stats where table_name='t1';
explain extended select * from t1 where a between 'a-3a' and 'zzzzzzzzz';
create table t2(a int);
insert into t2 select a*10 from ten;
analyze table t2 persistent for all;
explain extended select * from t2 where a between '44' and '55';
create table t1_bin (a varchar(255));
insert into t1_bin select concat('a-', a) from ten;
drop table t1;
drop table t2;
set histogram_size=10;
analyze table t1_bin persistent for all;
select hex(histogram) from mysql.column_stats where table_name='t1_bin';
explain extended select * from t1_bin where a between 'a-3a' and 'zzzzzzzzz';
analyze select * from t1_bin where a between 'a-3a' and 'zzzzzzzzz';
create table t1 (a varchar(255));
insert into t1 select concat('a-', a) from ten;
create table t1_json (a varchar(255));
insert into t1_json select concat('a-', a) from ten;
set histogram_type=json;
analyze table t1_json persistent for all;
select * from mysql.column_stats where table_name='t1_json';
explain extended select * from t1_json where a between 'a-3a' and 'zzzzzzzzz';
analyze select * from t1_json where a between 'a-3a' and 'zzzzzzzzz';
analyze table t1 persistent for all;
select * from mysql.column_stats where table_name='t1';
explain extended select * from t1 where a between 'a-3a' and 'zzzzzzzzz';
create table t2(a int);
insert into t2 select a*10 from ten;
set histogram_type=json;
analyze table t2 persistent for all;
explain extended select * from t2 where a between '44' and '55';
create table t2_bin(a int);
insert into t2_bin select a*10 from ten;
set histogram_type=@save_histogram_type;
analyze table t2_bin persistent for all;
explain extended select * from t2_bin where a between '44' and '55';
analyze select * from t2_bin where a between '44' and '55';
drop table t1;
drop table t2;
create table t2_json(a int);
insert into t2_json select a*10 from ten;
set histogram_type=json;
analyze table t2_json persistent for all;
select * from mysql.column_stats where table_name='t2_json';
explain extended select * from t2_json where a between '44' and '55';
analyze select * from t2_json where a between '44' and '55';
drop table t1_bin;
drop table t1_json;
drop table t2_bin;
drop table t2_json;
This diff is collapsed.
......@@ -12,34 +12,80 @@ set @save_histogram_type=@@histogram_type;
set @save_histogram_size=@@histogram_size;
CREATE TABLE t1 (
a int,
b varchar(32),
c char(2),
d double
a int NOT NULL PRIMARY KEY,
b varchar(32),
c char(16),
d date,
e double,
f bit(3),
INDEX idx1 (b, e),
INDEX idx2 (c, d),
INDEX idx3 (d),
INDEX idx4 (e, b, d)
);
--disable_result_log
INSERT INTO t1 SELECT seq, seq, seq, seq from seq_1_to_25;
INSERT INTO t1 VALUES
(0, NULL, NULL, NULL, NULL, NULL),
(7, 'xxxxxxxxxxxxxxxxxxxxxxxxxx', 'dddddddd', '1990-05-15', 0.1, b'100'),
(17, 'vvvvvvvvvvvvv', 'aaaa', '1989-03-12', 0.01, b'101'),
(1, 'vvvvvvvvvvvvv', NULL, '1989-03-12', 0.01, b'100'),
(12, 'wwwwwwwwwwwwwwwwwwwwwwwwwwww', 'dddddddd', '1999-07-23', 0.112, b'001'),
(23, 'vvvvvvvvvvvvv', 'dddddddd', '1999-07-23', 0.1, b'100'),
(8, 'vvvvvvvvvvvvv', 'aaaa', '1999-07-23', 0.1, b'100'),
(22, 'xxxxxxxxxxxxxxxxxxxxxxxxxx', 'aaaa', '1989-03-12', 0.112, b'001'),
(31, 'wwwwwwwwwwwwwwwwwwwwwwwwwwww', 'aaaa', '1999-07-23', 0.01, b'001'),
(10, NULL, 'aaaa', NULL, 0.01, b'010'),
(5, 'wwwwwwwwwwwwwwwwwwwwwwwwwwww', 'dddddddd', '1999-07-23', 0.1, b'100'),
(15, 'vvvvvvvvvvvvv', 'ccccccccc', '1990-05-15', 0.1, b'010'),
(30, NULL, 'bbbbbb', NULL, NULL, b'100'),
(38, 'zzzzzzzzzzzzzzzzzz', 'bbbbbb', NULL, NULL, NULL),
(18, 'zzzzzzzzzzzzzzzzzz', 'ccccccccc', '1990-05-15', 0.01, b'010'),
(9, 'yyy', 'bbbbbb', '1998-08-28', 0.01, NULL),
(29, 'vvvvvvvvvvvvv', 'dddddddd', '1999-07-23', 0.012, b'010'),
(3, 'yyy', 'dddddddd', '1990-05-15', 0.112, b'010'),
(39, 'zzzzzzzzzzzzzzzzzz', 'bbbbbb', NULL, 0.01, b'100'),
(14, 'xxxxxxxxxxxxxxxxxxxxxxxxxx', 'ccccccccc', '1990-05-15', 0.1, b'100'),
(40, 'zzzzzzzzzzzzzzzzzz', 'bbbbbb', '1989-03-12', NULL, NULL),
(44, NULL, 'aaaa', '1989-03-12', NULL, b'010'),
(19, 'vvvvvvvvvvvvv', 'ccccccccc', '1990-05-15', 0.012, b'011'),
(21, 'zzzzzzzzzzzzzzzzzz', 'dddddddd', '1989-03-12', 0.112, b'100'),
(45, NULL, NULL, '1989-03-12', NULL, b'011'),
(2, 'wwwwwwwwwwwwwwwwwwwwwwwwwwww', 'ccccccccc', '1990-05-15', 0.1, b'001'),
(35, 'yyy', 'aaaa', '1990-05-15', 0.05, b'011'),
(4, 'vvvvvvvvvvvvv', 'dddddddd', '1999-07-23', 0.01, b'101'),
(47, NULL, 'aaaa', '1990-05-15', 0.05, b'010'),
(42, NULL, 'ccccccccc', '1989-03-12', 0.01, b'010'),
(32, NULL, 'bbbbbb', '1990-05-15', 0.01, b'011'),
(49, 'wwwwwwwwwwwwwwwwwwwwwwwwwwww' , 'aaaa', '1990-05-15', NULL, NULL),
(43, 'wwwwwwwwwwwwwwwwwwwwwwwwwwww' , 'bbbbbb', '1990-05-15', NULL, b'100'),
(37, 'yyy', NULL, '1989-03-12', 0.05, b'011'),
(41, 'xxxxxxxxxxxxxxxxxxxxxxxxxx', 'ccccccccc', '1990-05-15', 0.05, NULL),
(34, 'yyy', NULL, NULL, NULL, NULL),
(33, 'zzzzzzzzzzzzzzzzzz', 'dddddddd', '1989-03-12', 0.05, b'011'),
(24, 'wwwwwwwwwwwwwwwwwwwwwwwwwwww', 'dddddddd', '1990-05-15', 0.01, b'101'),
(11, 'yyy', 'ccccccccc', '1999-07-23', 0.1, NULL),
(25, 'zzzzzzzzzzzzzzzzzz', 'bbb', '1989-03-12', 0.01, b'101');
--enable_result_log
SET histogram_type='JSON';
# set histogram size to be < row count (25 in this case) to see how histogram behaves
set histogram_size=10;
set histogram_size=25;
ANALYZE TABLE t1 PERSISTENT FOR ALL;
SELECT * FROM mysql.column_stats WHERE table_name='t1';
explain extended select * from t1 where b between '20' and '70';
ANALYZE TABLE t1 persistent for all;
SELECT * FROM mysql.table_stats;
SELECT * FROM mysql.column_stats;
SELECT * FROM mysql.index_stats;
SELECT COUNT(*) FROM t1;
# We then test different valid JSON strings that are invalid histograms.
UPDATE mysql.column_stats SET histogram='["1", {"a": "b"}, "2"]' WHERE table_name='t1';
FLUSH TABLES;
--error ER_JSON_HISTOGRAM_PARSE_FAILED
SELECT * FROM t1;
explain extended select * from t1 where a between '20' and '70';
analyze select * from t1 where a between '20' and '70';
UPDATE mysql.column_stats SET histogram='{}' WHERE table_name='t1';
FLUSH TABLES;
--error ER_JSON_HISTOGRAM_PARSE_FAILED
SELECT * FROM t1;
# todo: test different valid JSON strings that are invalid histograms.
# UPDATE mysql.column_stats SET histogram='["1", {"a": "b"}, "2"]' WHERE table_name='t1';
# FLUSH TABLES;
# --error ER_JSON_HISTOGRAM_PARSE_FAILED
# explain extended select * from t1 where a between '20' and '70';
DELETE FROM mysql.column_stats;
DROP TABLE t1;
......@@ -56,12 +102,14 @@ use world;
--enable_query_log
set histogram_type='JSON';
set histogram_size=25;
set histogram_size=50;
--disable_result_log
ANALYZE TABLE Country PERSISTENT FOR ALL;
ANALYZE TABLE Country, City, CountryLanguage persistent for all;
--enable_result_log
SELECT column_name, min_value, max_value, hist_size, hist_type, histogram FROM mysql.column_stats;
explain extended select * from Country where 'Code' between 'BBC' and 'GGG';
analyze select * from Country where 'Code' between 'BBC' and 'GGG';
set histogram_type=@save_histogram_type;
set histogram_size=@save_histogram_size;
......
......@@ -1564,9 +1564,9 @@ int Histogram_json::find_bucket(Field *field, const uchar *endpoint)
mid_val = histogram_bounds[mid];
int res = field->key_cmp((uchar*) mid_val.data(), endpoint);
min_bucket_index = mid;
if (res < 0) {
low = mid + 1;
min_bucket_index = mid;
} else if (res > 0) {
high = mid - 1;
} else {
......@@ -1574,6 +1574,9 @@ int Histogram_json::find_bucket(Field *field, const uchar *endpoint)
break;
}
}
if (min_bucket_index == -1)
min_bucket_index = high;
return min_bucket_index;
}
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment