Commit e8b1e216 authored by bell@sanja.is.com.ua

Merge sanja.is.com.ua:/home/bell/mysql/bk/work-cond_count-4.1

into sanja.is.com.ua:/home/bell/mysql/bk/work-top2-4.1
parents 2f54542f 2d120d32
......@@ -17,6 +17,7 @@ bar@gw.udmsearch.izhnet.ru
bell@laptop.sanja.is.com.ua
bell@sanja.is.com.ua
bk@admin.bk
bk@mysql.r18.ru
carsten@tsort.bitbybit.dk
davida@isil.mysql.com
gluh@gluh.(none)
......
......@@ -154,8 +154,26 @@ Warning 1258 1 line(s) was(were) cut by group_concat()
show warnings;
Level Code Message
Warning 1258 1 line(s) was(were) cut by group_concat()
set group_concat_max_len = 1024;
drop table if exists T_URL;
Warnings:
Note 1051 Unknown table 'T_URL'
create table T_URL ( URL_ID int(11), URL varchar(80));
drop table if exists T_REQUEST;
Warnings:
Note 1051 Unknown table 'T_REQUEST'
create table T_REQUEST ( REQ_ID int(11), URL_ID int(11));
insert into T_URL values (4,'www.host.com'), (5,'www.google.com'),(5,'www.help.com');
insert into T_REQUEST values (1,4), (5,4), (5,5);
select REQ_ID, Group_Concat(URL) as URL from T_URL, T_REQUEST where
T_REQUEST.URL_ID = T_URL.URL_ID group by REQ_ID;
REQ_ID URL
1 www.host.com
5 www.host.com,www.google.com,www.help.com
drop table T_URL;
drop table T_REQUEST;
select group_concat(sum(a)) from t1 group by grp;
ERROR HY000: Invalid use of group function
select grp,group_concat(c order by 2) from t1 group by grp;
ERROR 42S22: Unknown column '2' in 'group statement'
drop table if exists t1;
drop table t1;
......@@ -1150,3 +1150,28 @@ INSERT INTO t1 VALUES (1,0,NULL,NULL),(2,0,NULL,NULL);
SELECT DISTINCT REF_ID FROM t1 WHERE ID= (SELECT DISTINCT REF_ID FROM t1 WHERE ID=2);
REF_ID
DROP TABLE t1;
CREATE TABLE t1
(
FOLDERID VARCHAR(32)BINARY NOT NULL
, FOLDERNAME VARCHAR(255)BINARY NOT NULL
, CREATOR VARCHAR(255)BINARY
, CREATED TIMESTAMP NOT NULL
, DESCRIPTION VARCHAR(255)BINARY
, FOLDERTYPE INTEGER NOT NULL
, MODIFIED TIMESTAMP
, MODIFIER VARCHAR(255)BINARY
, FOLDERSIZE INTEGER NOT NULL
, PARENTID VARCHAR(32)BINARY
, REPID VARCHAR(32)BINARY
, ORIGINATOR INTEGER
, PRIMARY KEY ( FOLDERID )
) TYPE=InnoDB;
CREATE INDEX FFOLDERID_IDX ON t1 (FOLDERID);
CREATE INDEX CMFLDRPARNT_IDX ON t1 (PARENTID);
INSERT INTO t1 VALUES("0c9aab05b15048c59bc35c8461507deb", "System", "System", "2003-06-05 16:30:00", "The system content repository folder.", "3", "2003-06-05 16:30:00", "System", "0", NULL, "9c9aab05b15048c59bc35c8461507deb", "1");
INSERT INTO t1 VALUES("2f6161e879db43c1a5b82c21ddc49089", "Default", "System", "2003-06-09 10:52:02", "The default content repository folder.", "3", "2003-06-05 16:30:00", "System", "0", NULL, "03eea05112b845949f3fd03278b5fe43", "1");
INSERT INTO t1 VALUES("c373e9f5ad0791724315444553544200", "AddDocumentTest", "admin", "2003-06-09 10:51:25", "Movie Reviews", "0", "2003-06-09 10:51:25", "admin", "0", "2f6161e879db43c1a5b82c21ddc49089", "03eea05112b845949f3fd03278b5fe43", NULL);
SELECT 'c373e9f5ad0791a0dab5444553544200' IN(SELECT t1.FOLDERID FROM t1 WHERE t1.PARENTID='2f6161e879db43c1a5b82c21ddc49089' AND t1.FOLDERNAME = 'Level1');
'c373e9f5ad0791a0dab5444553544200' IN(SELECT t1.FOLDERID FROM t1 WHERE t1.PARENTID='2f6161e879db43c1a5b82c21ddc49089' AND t1.FOLDERNAME = 'Level1')
0
drop table t1;
......@@ -68,6 +68,20 @@ select grp,group_concat(c order by c) from t1 group by grp;
set group_concat_max_len = 5;
select grp,group_concat(c) from t1 group by grp;
show warnings;
set group_concat_max_len = 1024;
# Test variable length
drop table if exists T_URL;
create table T_URL ( URL_ID int(11), URL varchar(80));
drop table if exists T_REQUEST;
create table T_REQUEST ( REQ_ID int(11), URL_ID int(11));
insert into T_URL values (4,'www.host.com'), (5,'www.google.com'),(5,'www.help.com');
insert into T_REQUEST values (1,4), (5,4), (5,5);
select REQ_ID, Group_Concat(URL) as URL from T_URL, T_REQUEST where
T_REQUEST.URL_ID = T_URL.URL_ID group by REQ_ID;
drop table T_URL;
drop table T_REQUEST;
# Test errors
......@@ -76,4 +90,4 @@ select group_concat(sum(a)) from t1 group by grp;
--error 1054
select grp,group_concat(c order by 2) from t1 group by grp;
drop table if exists t1;
drop table t1;
......@@ -732,3 +732,31 @@ CREATE TABLE t1 (
INSERT INTO t1 VALUES (1,0,NULL,NULL),(2,0,NULL,NULL);
SELECT DISTINCT REF_ID FROM t1 WHERE ID= (SELECT DISTINCT REF_ID FROM t1 WHERE ID=2);
DROP TABLE t1;
#
# key field overflow test
#
CREATE TABLE t1
(
FOLDERID VARCHAR(32)BINARY NOT NULL
, FOLDERNAME VARCHAR(255)BINARY NOT NULL
, CREATOR VARCHAR(255)BINARY
, CREATED TIMESTAMP NOT NULL
, DESCRIPTION VARCHAR(255)BINARY
, FOLDERTYPE INTEGER NOT NULL
, MODIFIED TIMESTAMP
, MODIFIER VARCHAR(255)BINARY
, FOLDERSIZE INTEGER NOT NULL
, PARENTID VARCHAR(32)BINARY
, REPID VARCHAR(32)BINARY
, ORIGINATOR INTEGER
, PRIMARY KEY ( FOLDERID )
) TYPE=InnoDB;
CREATE INDEX FFOLDERID_IDX ON t1 (FOLDERID);
CREATE INDEX CMFLDRPARNT_IDX ON t1 (PARENTID);
INSERT INTO t1 VALUES("0c9aab05b15048c59bc35c8461507deb", "System", "System", "2003-06-05 16:30:00", "The system content repository folder.", "3", "2003-06-05 16:30:00", "System", "0", NULL, "9c9aab05b15048c59bc35c8461507deb", "1");
INSERT INTO t1 VALUES("2f6161e879db43c1a5b82c21ddc49089", "Default", "System", "2003-06-09 10:52:02", "The default content repository folder.", "3", "2003-06-05 16:30:00", "System", "0", NULL, "03eea05112b845949f3fd03278b5fe43", "1");
INSERT INTO t1 VALUES("c373e9f5ad0791724315444553544200", "AddDocumentTest", "admin", "2003-06-09 10:51:25", "Movie Reviews", "0", "2003-06-09 10:51:25", "admin", "0", "2f6161e879db43c1a5b82c21ddc49089", "03eea05112b845949f3fd03278b5fe43", NULL);
SELECT 'c373e9f5ad0791a0dab5444553544200' IN(SELECT t1.FOLDERID FROM t1 WHERE t1.PARENTID='2f6161e879db43c1a5b82c21ddc49089' AND t1.FOLDERNAME = 'Level1');
drop table t1;
......@@ -145,6 +145,7 @@ then
then
i_u="INSERT INTO user VALUES ('localhost','root','','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','','','','',0,0,0);
INSERT INTO user VALUES ('$hostname','root','','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','','','','',0,0,0);
REPLACE INTO user VALUES ('127.0.0.1','root','','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','Y','','','','',0,0,0);
INSERT INTO user (host,user) values ('localhost','');
INSERT INTO user (host,user) values ('$hostname','');"
else
......
......@@ -183,7 +183,7 @@ Field::Field(char *ptr_arg,uint32 length_arg,uchar *null_ptr_arg,
field_name(field_name_arg),
query_id(0),key_start(0),part_of_key(0),part_of_sortkey(0),
unireg_check(unireg_check_arg),
field_length(length_arg),null_bit(null_bit_arg)
field_length(length_arg),null_bit(null_bit_arg),abs_offset(0)
{
flags=null_ptr ? 0: NOT_NULL_FLAG;
comment.str= (char*) "";
......
......@@ -66,6 +66,7 @@ public:
uint32 field_length; // Length of field
uint16 flags;
uchar null_bit; // Bit used to test null bit
uint abs_offset; // use only in group_concat
Field(char *ptr_arg,uint32 length_arg,uchar *null_ptr_arg,uchar null_bit_arg,
utype unireg_check_arg, const char *field_name_arg,
......
......@@ -1644,7 +1644,7 @@ Item_cond::fix_fields(THD *thd, TABLE_LIST *tables, Item **ref)
maybe_null=1;
}
if (thd)
thd->cond_count+=list.elements;
thd->lex.current_select->cond_count+=list.elements;
fix_length_and_dec();
fixed= 1;
return 0;
......
......@@ -893,6 +893,8 @@ int subselect_single_select_engine::exec()
{
DBUG_ENTER("subselect_single_select_engine::exec");
char const *save_where= join->thd->where;
SELECT_LEX_NODE *save_select= join->thd->lex.current_select;
join->thd->lex.current_select= select_lex;
if (!optimized)
{
optimized=1;
......@@ -900,6 +902,7 @@ int subselect_single_select_engine::exec()
{
join->thd->where= save_where;
executed= 1;
join->thd->lex.current_select= save_select;
DBUG_RETURN(join->error?join->error:1);
}
}
......@@ -908,6 +911,7 @@ int subselect_single_select_engine::exec()
if (join->reinit())
{
join->thd->where= save_where;
join->thd->lex.current_select= save_select;
DBUG_RETURN(1);
}
item->reset();
......@@ -915,15 +919,14 @@ int subselect_single_select_engine::exec()
}
if (!executed)
{
SELECT_LEX_NODE *save_select= join->thd->lex.current_select;
join->thd->lex.current_select= select_lex;
join->exec();
join->thd->lex.current_select= save_select;
executed= 1;
join->thd->where= save_where;
join->thd->lex.current_select= save_select;
DBUG_RETURN(join->error||thd->is_fatal_error);
}
join->thd->where= save_where;
join->thd->lex.current_select= save_select;
DBUG_RETURN(0);
}
......
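
Note on the item_subselect.cc hunks above: saving and restoring lex.current_select is hoisted from the inner if (!executed) block to the top of subselect_single_select_engine::exec(), and the caller's select is put back on every exit path, including the optimize and reinit error returns, so the subquery's SELECT_LEX stays current for the whole call. A minimal stand-alone sketch of that invariant, with stand-in types (the scope-guard class here is hypothetical; the 4.1 code uses the explicit assignments shown in the diff):

    // Stand-in types; the real code uses THD, st_lex and SELECT_LEX_NODE.
    struct Select { };
    struct Lex    { Select *current_select; };

    // Every exit path must restore the caller's select; a scope guard states
    // the same invariant that exec() enforces with explicit assignments.
    struct SwitchCurrentSelect {
      Lex &lex;
      Select *saved;
      SwitchCurrentSelect(Lex &l, Select *subquery_select)
        : lex(l), saved(l.current_select) { lex.current_select= subquery_select; }
      ~SwitchCurrentSelect() { lex.current_select= saved; }  // runs on early returns too
    };
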
......@@ -1453,27 +1453,28 @@ String *Item_sum_udf_str::val_str(String *str)
GROUP_CONCAT(DISTINCT expr,...)
*/
static int group_concat_key_cmp_with_distinct(void* arg, byte* key1,
byte* key2)
int group_concat_key_cmp_with_distinct(void* arg, byte* key1,
byte* key2)
{
Item_func_group_concat* item= (Item_func_group_concat*)arg;
for (uint i= 0; i < item->arg_count_field; i++)
{
Item *field_item= item->expr[i];
Item *field_item= item->args[i];
Field *field= field_item->tmp_table_field();
if (field)
{
uint offset= field->offset();
uint offset= field->abs_offset;
int res= field->key_cmp(key1 + offset, key2 + offset);
/*
If key1 and key2 are not equal, field->key_cmp() returns a non-zero offset;
this function must return 1 in that case.
*/
if (res)
return 1;
}
}
}
return 0;
}
......@@ -1483,9 +1484,10 @@ static int group_concat_key_cmp_with_distinct(void* arg, byte* key1,
GROUP_CONCAT(expr,... ORDER BY col,... )
*/
static int group_concat_key_cmp_with_order(void* arg, byte* key1, byte* key2)
int group_concat_key_cmp_with_order(void* arg, byte* key1, byte* key2)
{
Item_func_group_concat* item= (Item_func_group_concat*)arg;
for (uint i=0; i < item->arg_count_order; i++)
{
ORDER *order_item= item->order[i];
......@@ -1493,14 +1495,14 @@ static int group_concat_key_cmp_with_order(void* arg, byte* key1, byte* key2)
Field *field= item->tmp_table_field();
if (field)
{
uint offset= field->offset();
uint offset= field->abs_offset;
bool dir= order_item->asc;
int res= field->key_cmp(key1 + offset, key2 + offset);
if (res)
return dir ? res : -res;
}
}
}
/*
We can't return 0, because the tree would treat the row as a duplicate and remove it.
*/
......@@ -1513,9 +1515,8 @@ static int group_concat_key_cmp_with_order(void* arg, byte* key1, byte* key2)
GROUP_CONCAT(DISTINCT expr,... ORDER BY col,... )
*/
static int group_concat_key_cmp_with_distinct_and_order(void* arg,
byte* key1,
byte* key2)
int group_concat_key_cmp_with_distinct_and_order(void* arg,byte* key1,
byte* key2)
{
if (!group_concat_key_cmp_with_distinct(arg,key1,key2))
return 0;
......@@ -1528,24 +1529,23 @@ static int group_concat_key_cmp_with_distinct_and_order(void* arg,
item is a pointer to Item_func_group_concat
*/
static int dump_leaf_key(byte* key, uint32 count __attribute__((unused)),
int dump_leaf_key(byte* key, uint32 count __attribute__((unused)),
Item_func_group_concat *group_concat_item)
{
char buff[MAX_FIELD_WIDTH];
String tmp((char *)&buff,sizeof(buff),default_charset_info);
String tmp2((char *)&buff,sizeof(buff),default_charset_info);
tmp.length(0);
for (uint i= 0; i < group_concat_item->arg_show_fields; i++)
{
Item *show_item= group_concat_item->expr[i];
Item *show_item= group_concat_item->args[i];
if (!show_item->const_item())
{
Field *f= show_item->tmp_table_field();
uint offset= f->offset();
char *sv= f->ptr;
f->ptr= (char *)key + offset;
f->ptr= (char *)key + f->abs_offset;
String *res= f->val_str(&tmp,&tmp2);
group_concat_item->result.append(*res);
f->ptr= sv;
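
The dump_leaf_key() hunk above relies on a ptr-swap idiom: a Field decodes whatever its ptr currently points at, so temporarily retargeting it at key + abs_offset lets val_str() read the column straight out of the key image stored in the tree, after which the saved record pointer is restored. A self-contained toy illustration of the idiom (not MySQL code; the IntField type and the offsets are made up):

    #include <cstdio>
    #include <cstring>

    struct IntField {
      const char *ptr;                                 // like Field::ptr
      int val() const { int v; std::memcpy(&v, ptr, sizeof v); return v; }
    };

    int main() {
      char record[8]= {0};
      char key[8]= {0};
      int in_record= 7, in_key= 42;
      std::memcpy(record, &in_record, sizeof in_record);
      std::memcpy(key + 4, &in_key, sizeof in_key);    // pretend abs_offset == 4

      IntField f= { record };
      const char *saved= f.ptr;                        // save, retarget, decode, restore
      f.ptr= key + 4;
      std::printf("from key: %d\n", f.val());          // prints 42
      f.ptr= saved;
      std::printf("from record: %d\n", f.val());       // prints 7
      return 0;
    }
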
......@@ -1595,9 +1595,14 @@ Item_func_group_concat::Item_func_group_concat(bool is_distinct,
List<Item> *is_select,
SQL_LIST *is_order,
String *is_separator)
:Item_sum(), tmp_table_param(0), warning_available(false),
separator(is_separator), tree(&tree_base), table(0),
count_cut_values(0), tree_mode(0), distinct(is_distinct)
:Item_sum(), tmp_table_param(0), max_elements_in_tree(0), warning(0),
warning_available(0), key_length(0), rec_offset(0),
tree_mode(0), distinct(is_distinct), warning_for_row(0),
separator(is_separator), tree(&tree_base), table(0),
order(0), tables_list(0), group_concat_max_len(0),
show_elements(0), arg_count_order(0), arg_count_field(0),
arg_show_fields(0), count_cut_values(0)
{
original= 0;
quick_group= 0;
......@@ -1613,16 +1618,12 @@ Item_func_group_concat::Item_func_group_concat(bool is_distinct,
We need to allocate:
args - arg_count+arg_count_order (for possible order items in temporary
tables)
expr - arg_count_field
order - arg_count_order
*/
args= (Item**) sql_alloc(sizeof(Item*)*(arg_count+arg_count_order+
arg_count_field)+
args= (Item**) sql_alloc(sizeof(Item*)*(arg_count+arg_count_order)+
sizeof(ORDER*)*arg_count_order);
if (!args)
return; // thd->fatal is set
expr= args;
expr+= arg_count+arg_count_order;
return;
/* fill args with the select (shown) items and the sort items */
int i= 0;
......@@ -1630,12 +1631,12 @@ Item_func_group_concat::Item_func_group_concat(bool is_distinct,
Item *item_select;
for ( ; (item_select= li++) ; i++)
args[i]= expr[i]= item_select;
args[i]= item_select;
if (arg_count_order)
{
i= 0;
order= (ORDER**)(expr + arg_count_field);
order= (ORDER**)(args + arg_count + arg_count_order);
for (ORDER *order_item= (ORDER*) is_order->first;
order_item != NULL;
order_item= order_item->next)
......@@ -1696,13 +1697,15 @@ bool Item_func_group_concat::reset()
bool Item_func_group_concat::add()
{
if (always_null)
return 0;
copy_fields(tmp_table_param);
copy_funcs(tmp_table_param->items_to_copy);
bool record_is_null= TRUE;
for (uint i= 0; i < arg_show_fields; i++)
{
Item *show_item= expr[i];
Item *show_item= args[i];
if (!show_item->const_item())
{
Field *f= show_item->tmp_table_field();
......@@ -1718,13 +1721,13 @@ bool Item_func_group_concat::add()
null_value= FALSE;
if (tree_mode)
{
if (!tree_insert(tree, table->record[0], 0,tree->custom_arg))
if (!tree_insert(tree, table->record[0] + rec_offset, 0, tree->custom_arg))
return 1;
}
else
{
if (result.length() <= group_concat_max_len && !warning_for_row)
dump_leaf_key(table->record[0],1,
dump_leaf_key(table->record[0] + rec_offset, 1,
(Item_func_group_concat*)this);
}
return 0;
......@@ -1757,12 +1760,6 @@ Item_func_group_concat::fix_fields(THD *thd, TABLE_LIST *tables, Item **ref)
return 1;
maybe_null |= args[i]->maybe_null;
}
for (i= 0 ; i < arg_count_field ; i++)
{
if (expr[i]->fix_fields(thd, tables, expr + i) || expr[i]->check_cols(1))
return 1;
maybe_null |= expr[i]->maybe_null;
}
/*
Fix fields for order clause in function:
GROUP_CONCAT(expr,... ORDER BY col,... )
......@@ -1796,6 +1793,7 @@ bool Item_func_group_concat::setup(THD *thd)
/*
all non-constant fields are pushed to the list used to create the temp table
*/
always_null= 0;
for (uint i= 0; i < arg_count; i++)
{
Item *item= args[i];
......@@ -1808,6 +1806,8 @@ bool Item_func_group_concat::setup(THD *thd)
always_null= 1;
}
}
if (always_null)
return 0;
List<Item> all_fields(list);
if (arg_count_order)
......@@ -1827,12 +1827,25 @@ bool Item_func_group_concat::setup(THD *thd)
return 1;
table->file->extra(HA_EXTRA_NO_ROWS);
table->no_rows= 1;
qsort_cmp2 compare_key;
tree_mode= distinct || arg_count_order;
Field** field, **field_end;
field_end = (field = table->field) + table->fields;
uint offset = 0;
for (key_length = 0; field < field_end; ++field)
{
uint32 length= (*field)->pack_length();
(*field)->abs_offset= offset;
offset+= length;
key_length += length;
}
rec_offset = table->reclength - key_length;
/*
choose the sort (comparison) function
*/
tree_mode= distinct || arg_count_order;
qsort_cmp2 compare_key;
if (tree_mode)
{
if (arg_count_order)
......@@ -1856,9 +1869,9 @@ bool Item_func_group_concat::setup(THD *thd)
*/
init_tree(tree, min(thd->variables.max_heap_table_size,
thd->variables.sortbuff_size/16), 0,
table->reclength, compare_key, 0, NULL, (void*) this);
max_elements_in_tree= ((table->reclength) ?
thd->variables.max_heap_table_size/table->reclength : 1);
key_length, compare_key, 0, NULL, (void*) this);
max_elements_in_tree= ((key_length) ?
thd->variables.max_heap_table_size/key_length : 1);
};
item_thd= thd;
......@@ -1909,3 +1922,6 @@ String* Item_func_group_concat::val_str(String* str)
}
return &result;
}
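
Net effect of the item_sum.cc changes: the tree used for DISTINCT and ORDER BY no longer stores whole record images of table->reclength bytes, only the packed data portion of key_length bytes starting at rec_offset, max_elements_in_tree is derived from key_length, and the comparators and dump_leaf_key() address columns inside that image through the new Field::abs_offset rather than Field::offset(), which is relative to the start of the whole record buffer. A small stand-alone sketch of the offset arithmetic done in setup(), assuming, as the setup() loop does, that the data portion lays the fields out back to back in pack_length units; the field sizes and reclength below are made-up example values:

    #include <cstdio>

    struct FieldInfo { unsigned pack_length; unsigned abs_offset; };

    int main() {
      // e.g. int(11), varchar(80), int(11) -- example sizes only
      FieldInfo fields[]= { {4, 0}, {81, 0}, {4, 0} };
      unsigned reclength= 90;                  // full record incl. null-byte prefix (example)

      unsigned key_length= 0;
      for (unsigned i= 0; i < 3; i++) {        // mirrors the loop in setup()
        fields[i].abs_offset= key_length;      // offset inside the stored key image
        key_length+= fields[i].pack_length;
      }
      unsigned rec_offset= reclength - key_length;  // prefix skipped when inserting into the tree

      // The tree now stores record[0] + rec_offset for key_length bytes, so a
      // column's bytes live at key + abs_offset in every stored entry.
      std::printf("key_length=%u rec_offset=%u\n", key_length, rec_offset);
      return 0;
    }
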
......@@ -644,13 +644,28 @@ class Item_func_group_concat : public Item_sum
uint max_elements_in_tree;
MYSQL_ERROR *warning;
bool warning_available;
uint key_length;
int rec_offset;
bool tree_mode;
bool distinct;
bool warning_for_row;
bool always_null;
friend int group_concat_key_cmp_with_distinct(void* arg, byte* key1,
byte* key2);
friend int group_concat_key_cmp_with_order(void* arg, byte* key1, byte* key2);
friend int group_concat_key_cmp_with_distinct_and_order(void* arg,
byte* key1,
byte* key2);
friend int dump_leaf_key(byte* key, uint32 count __attribute__((unused)),
Item_func_group_concat *group_concat_item);
public:
String result;
String *separator;
TREE tree_base;
TREE *tree;
TABLE *table;
Item **expr;
ORDER **order;
TABLE_LIST *tables_list;
ulong group_concat_max_len;
......@@ -659,9 +674,6 @@ class Item_func_group_concat : public Item_sum
uint arg_count_field;
uint arg_show_fields;
uint count_cut_values;
bool tree_mode, distinct;
bool warning_for_row;
bool always_null;
/*
Following is 0 normal object and pointer to original one for copy
(to correctly free resources)
......@@ -677,10 +689,14 @@ class Item_func_group_concat : public Item_sum
max_elements_in_tree(item.max_elements_in_tree),
warning(item.warning),
warning_available(item.warning_available),
key_length(item.key_length),
rec_offset(item.rec_offset),
tree_mode(0),
distinct(item.distinct),
warning_for_row(item.warning_for_row),
separator(item.separator),
tree(item.tree),
table(item.table),
expr(item.expr),
order(item.order),
tables_list(item.tables_list),
group_concat_max_len(item.group_concat_max_len),
......@@ -689,9 +705,6 @@ class Item_func_group_concat : public Item_sum
arg_count_field(item.arg_count_field),
arg_show_fields(item.arg_show_fields),
count_cut_values(item.count_cut_values),
tree_mode(0),
distinct(item.distinct),
warning_for_row(item.warning_for_row),
original(&item)
{
quick_group = 0;
......
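
In the item_sum.h hunks above, tree_mode, distinct and warning_for_row move up in the class, presumably to keep the declaration order in line with the copy constructor's initializer list, and the comparison helpers plus dump_leaf_key lose their static linkage and are declared friends of the class. The ordering matters because C++ initializes non-static members in declaration order, not in the order written in the mem-initializer list; a tiny stand-alone illustration of the pitfall that rule can cause (unrelated to the MySQL types):

    // Members initialize in declaration order, not initializer-list order:
    struct Reordered {
      int a;
      int b;
      Reordered() : b(1), a(b + 1) {}   // 'a' is built first and reads 'b' before b(1) runs;
                                        // compilers typically warn (-Wreorder)
    };
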
......@@ -2151,7 +2151,7 @@ int setup_conds(THD *thd,TABLE_LIST *tables,COND **conds)
DBUG_ENTER("setup_conds");
thd->set_query_id=1;
thd->cond_count= 0;
thd->lex.current_select->cond_count= 0;
if (*conds)
{
thd->where="where clause";
......@@ -2169,7 +2169,7 @@ int setup_conds(THD *thd,TABLE_LIST *tables,COND **conds)
if (table->on_expr->fix_fields(thd, tables, &table->on_expr) ||
table->on_expr->check_cols(1))
DBUG_RETURN(1);
thd->cond_count++;
thd->lex.current_select->cond_count++;
/* If it's a normal join, add the ON/USING expression to the WHERE */
if (!table->outer_join)
......@@ -2215,7 +2215,7 @@ int setup_conds(THD *thd,TABLE_LIST *tables,COND **conds)
}
}
cond_and->used_tables_cache= t1->map | t2->map;
thd->cond_count+=cond_and->list.elements;
thd->lex.current_select->cond_count+=cond_and->list.elements;
if (!table->outer_join) // Not left join
{
if (!(*conds=and_conds(*conds, cond_and)))
......
......@@ -107,7 +107,6 @@ THD::THD():user_time(0), is_fatal_error(0),
slave_thread = 0;
variables.pseudo_thread_id= 0;
file_id = 0;
cond_count=0;
warn_id= 0;
db_charset= global_system_variables.character_set_database;
mysys_var=0;
......
......@@ -520,7 +520,7 @@ public:
ulong row_count; // Row counter, mainly for errors and warnings
long dbug_thread_id;
pthread_t real_id;
uint current_tablenr,tmp_table,cond_count;
uint current_tablenr,tmp_table;
uint server_status,open_options;
uint32 query_length;
uint32 db_length;
......
......@@ -967,6 +967,7 @@ void st_select_lex_node::init_query()
linkage= UNSPECIFIED_TYPE;
no_table_names_allowed= uncacheable= dependent= 0;
ref_pointer_array= 0;
cond_count= 0;
}
void st_select_lex_node::init_select()
......
......@@ -207,6 +207,7 @@ public:
Item **ref_pointer_array;
uint select_items; /* number of items in select_list */
uint cond_count; /* number of arguments of and/or/xor in where/having */
enum_parsing_place parsing_place; /* where we are parsing expression */
bool with_sum_func; /* sum function indicator */
bool dependent; /* dependent from outer select subselect */
......
......@@ -2170,7 +2170,8 @@ update_ref_and_keys(THD *thd, DYNAMIC_ARRAY *keyuse,JOIN_TAB *join_tab,
KEY_FIELD *key_fields,*end;
if (!(key_fields=(KEY_FIELD*)
thd->alloc(sizeof(key_fields[0])*(thd->cond_count+1)*2)))
thd->alloc(sizeof(key_fields[0])*
(thd->lex.current_select->cond_count+1)*2)))
return TRUE; /* purecov: inspected */
and_level=0; end=key_fields;
if (cond)
......
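
Taken together, the item_cmpfunc.cc, sql_base.cc, sql_class.*, sql_lex.* and sql_select.cc hunks retire THD::cond_count and count AND/OR arguments and ON expressions per SELECT_LEX instead, so a subquery executing through the engine changes above no longer clobbers the outer query's counter; update_ref_and_keys() then sizes its KEY_FIELD scratch array from the select currently being optimized. A minimal sketch of that sizing with stand-in types (the +1 and *2 factors simply follow the allocation shown in the last hunk):

    #include <cstdlib>

    struct SelectLex { unsigned cond_count; };   // per-select counter, as in the sql_lex.h hunk
    struct KeyField  { };                        // stand-in for the optimizer's KEY_FIELD

    // Mirrors thd->alloc(sizeof(key_fields[0]) * (cond_count + 1) * 2) in
    // update_ref_and_keys(), but reads the counter from the current select.
    KeyField *alloc_key_fields(const SelectLex &current_select) {
      return static_cast<KeyField*>(
          std::malloc(sizeof(KeyField) * (current_select.cond_count + 1) * 2));
    }
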