Commit 81a08c54 authored by Varun Gupta

MDEV-11563: GROUP_CONCAT(DISTINCT ...) may produce a non-distinct list

 Backported from MySQL
 Bug #25331425: DISTINCT CLAUSE DOES NOT WORK IN GROUP_CONCAT
    Issue:
    ------
    The problem occurs when:
    1) GROUP_CONCAT(DISTINCT ...) is used in the query.
    2) The data size is greater than the value of the system
    variable tmp_table_size.

    The result would contain values that are non-unique.
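
    A minimal reproduction sketch (the table name, the data pattern
    and the use of MariaDB's Sequence engine table seq_1_to_5000 are
    illustrative assumptions, not taken from the original bug report):

      -- force the in-memory distinct filter to spill to disk early
      SET SESSION tmp_table_size = 1024;
      CREATE TABLE t_repro (grp INT, val VARCHAR(32));
      -- 5000 rows in one group, but only 50 distinct values
      INSERT INTO t_repro
        SELECT 1, CONCAT('v', seq MOD 50) FROM seq_1_to_5000;
      SELECT grp, GROUP_CONCAT(DISTINCT val) FROM t_repro GROUP BY grp;
      DROP TABLE t_repro;

    Once the distinct filter spills to disk, the list produced before
    this fix could repeat some of the 50 values; after the fix each
    value appears exactly once.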

    Root cause:
    -----------
    An in-memory structure is used to filter out non-unique
    values. When the data size exceeds tmp_table_size, the
    overflow is written to disk as a separate file. The
    expectation here is that when all such files are merged,
    the full set of unique values can be obtained.

    But the Item_func_group_concat::add function is in a bit of a
    hurry. Even as it is adding values to the tree, it wants to
    decide if a value is unique and write it to the result
    buffer. This works fine if the configured maximum size is
    greater than the size of the data. But since tmp_table_size
    is set to a low value, the size of the tree is smaller and
    hence requires the creation of multiple copies on disk.

    Item_func_group_concat currently has no mechanism to merge
    all the copies on disk and then generate the result. This
    results in duplicate values.

    Solution:
    ---------
    In case of the DISTINCT clause, don't write to the result
    buffer immediately. Do the merge and only then put the
    unique values in the result buffer. This has been done in
    Item_func_group_concat::val_str.

    Note regarding result file changes:
    -----------------------------------
    Earlier, when a unique value was seen in
    Item_func_group_concat::add, it was dumped to the output.
    So the result was in the order in which rows are stored in
    the storage engine (SE). With this fix, we wait until all
    the data has been read and the final set of unique values
    is written to the output buffer. So the data now appears
    in sorted order.

    This only fixes the case where GROUP_CONCAT uses DISTINCT without an
    ORDER BY clause.
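
    As an illustration of the ordering change (the values shown are
    hypothetical; only the order they appear in is the point):

      SELECT grp, GROUP_CONCAT(DISTINCT c) FROM t1 GROUP BY grp;
      -- before the fix: e.g. "3,1,2"  (order in which rows were read from the SE)
      -- after the fix:       "1,2,3"  (sorted, produced by walking the unique filter)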
parent befb0bed
@@ -986,6 +986,13 @@ select grp,group_concat(c order by c limit 2) from t1 group by grp;
select grp,group_concat(c order by c desc) from t1 group by grp;
select grp,group_concat(c order by c desc limit 2) from t1 group by grp;
--echo #
--echo # Empty results for group concat as offset is greater than the rows
--echo # for a group
--echo #
select grp,group_concat(distinct c limit 10,1 ) from t1 group by grp;
drop table t1;
create table t2 (a int, b varchar(10));
......
@@ -3619,23 +3619,25 @@ int dump_leaf_key(void* key_arg, element_count count __attribute__((unused)),
ulonglong *offset_limit= &item->copy_offset_limit;
ulonglong *row_limit = &item->copy_row_limit;
if (item->limit_clause && !(*row_limit))
{
item->result_finalized= true;
return 1;
if (item->no_appended)
item->no_appended= FALSE;
else
result->append(*item->separator);
}
tmp.length(0);
if (item->limit_clause && (*offset_limit))
{
item->row_count++;
item->no_appended= TRUE;
(*offset_limit)--;
return 0;
}
if (!item->result_finalized)
item->result_finalized= true;
else
result->append(*item->separator);
for (; arg < arg_end; arg++)
{
String *res;
@@ -3890,7 +3892,7 @@ void Item_func_group_concat::clear()
result.copy();
null_value= TRUE;
warning_for_row= FALSE;
no_appended= TRUE;
result_finalized= FALSE;
if (offset_limit)
copy_offset_limit= offset_limit->val_int();
if (row_limit)
@@ -4023,13 +4025,12 @@ bool Item_func_group_concat::add()
return 1;
tree_len+= row_str_len;
}
/*
If the row is not a duplicate (el->count == 1)
we can dump the row here in case of GROUP_CONCAT(DISTINCT...)
instead of doing tree traverse later.
In case of GROUP_CONCAT with DISTINCT or ORDER BY (or both) don't dump the
row to the output buffer here. That will be done in val_str.
*/
if (row_eligible && !warning_for_row &&
(!tree || (el->count == 1 && distinct && !arg_count_order)))
if (row_eligible && !warning_for_row && (!tree && !distinct))
dump_leaf_key(table->record[0] + table->s->null_bytes, 1, this);
return 0;
@@ -4264,9 +4265,18 @@ String* Item_func_group_concat::val_str(String* str)
DBUG_ASSERT(fixed == 1);
if (null_value)
return 0;
if (no_appended && tree)
/* Tree is used for sorting as in ORDER BY */
if (!result_finalized) // Result yet to be written.
{
if (tree != NULL) // order by
tree_walk(tree, &dump_leaf_key, this, left_root_right);
else if (distinct) // distinct (and no order by).
unique_filter->walk(table, &dump_leaf_key, this);
else if (row_limit && copy_row_limit == (ulonglong)row_limit->val_int())
return &result;
else
DBUG_ASSERT(false); // Can't happen
}
if (table && table->blob_storage &&
table->blob_storage->is_truncated_value())
......
@@ -1813,7 +1813,8 @@ class Item_func_group_concat : public Item_sum
bool warning_for_row;
bool always_null;
bool force_copy_fields;
bool no_appended;
/** True if entire result of GROUP_CONCAT has been written to output buffer. */
bool result_finalized;
/** Limits the rows in the result */
Item *row_limit;
/** Skips a particular number of rows in from the result*/
......