mirror of https://github.com/ByConity/ByConity
Merge branch 'master' into byconity_predict_hash_table
This commit is contained in: commit 6b8697480c
@@ -41,13 +41,54 @@ jobs:
        continue-on-error: true
        run: |
          bash ${GITHUB_WORKSPACE}/ci_scripts/cluster_up.sh
          echo $PATH
          ls /usr/bin
          bash ${GITHUB_WORKSPACE}/docker/test/foundationdb/show_fdb.sh
          export ADDITIONAL_OPTIONS=' --run cnch_stateless'
          export MAX_RUN_TIME=9000
          cp -r $GITHUB_WORKSPACE/docker/test/stateless/* .
          /bin/bash run_cnch.sh
      - name: Run CertificateStateful
        continue-on-error: true
        run: |
          mkdir -p ci_certificate_wp
          cd ci_certificate_wp
          cp -r ${GITHUB_WORKSPACE}/docker/test/certificate/* ./
          echo "Load tables for certificate - first batch" && python3 load_certificate_tables.py --suite-path ${GITHUB_WORKSPACE}/tests/queries/3_1_certificate_aeolus_bp_edu && python3 load_certificate_tables.py --suite-path ${GITHUB_WORKSPACE}/tests/queries/3_2_certificate_aeolus_delta && python3 load_certificate_tables.py --suite-path ${GITHUB_WORKSPACE}/tests/queries/3_3_certificate_datarocks && wait
          echo "Load tables for certificate - second batch" && python3 load_certificate_tables.py --suite-path ${GITHUB_WORKSPACE}/tests/queries/3_4_certificate_deepinsight && python3 load_certificate_tables.py --suite-path ${GITHUB_WORKSPACE}/tests/queries/3_5_certificate_ecom_data && python3 load_certificate_tables.py --suite-path ${GITHUB_WORKSPACE}/tests/queries/3_6_certificate_libra_hl && wait
          echo "Load tables for certificate - last batch" && python3 load_certificate_tables.py --suite-path ${GITHUB_WORKSPACE}/tests/queries/3_7_certificate_motor_dzx && wait
          echo "Load tables for certificates done"

          cp ${GITHUB_WORKSPACE}/docker/test/stateless/process_functional_tests_result.py process_functional_tests_result.py
          cp -r ${GITHUB_WORKSPACE}/tests/clickhouse-test ./
          mkdir queries
          cp -r ${GITHUB_WORKSPACE}/tests/queries/3_1_certificate_aeolus_bp_edu queries/
          cp -r ${GITHUB_WORKSPACE}/tests/queries/3_2_certificate_aeolus_delta queries/
          cp -r ${GITHUB_WORKSPACE}/tests/queries/3_3_certificate_datarocks queries/
          cp -r ${GITHUB_WORKSPACE}/tests/queries/3_7_certificate_motor_dzx queries/
          cp -r ${GITHUB_WORKSPACE}/tests/queries/shell_config.sh queries/
          mkdir -p test_output
          ./clickhouse-test --stop --hung-check --jobs 1 --order asc --print-time 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
          ./process_functional_tests_result.py --in-results-dir ./test_output/ --out-results-file /test_output/certificate_test_results.tsv --out-status-file /test_output/certificate_check_status.tsv || echo -e "failure\tCannot parse results" > /test_output/certificate_check_status.tsv
          cd ..
          rm -rf ci_certificate_wp
      - name: CNCH - FuntionalStateless (multi-workers + ClickhouseSQL + w/w.o optimizer)
        continue-on-error: true
        run: |
          mkdir -p ci_clickhouse_sql
          cd ci_clickhouse_sql
          cp -r ${GITHUB_WORKSPACE}/docker/test/stateless/process_functional_tests_result.py ./
          cp -r ${GITHUB_WORKSPACE}/tests/clickhouse-test ./
          mkdir queries
          cp -r ${GITHUB_WORKSPACE}/tests/queries/7_clickhouse_sql queries/
          cp -r ${GITHUB_WORKSPACE}/tests/queries/shell_config.sh queries/
          mkdir -p test_output
          echo "Running test without optimizer"
          ./clickhouse-test --stop --hung-check --jobs 1 --order asc --print-time 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
          ./process_functional_tests_result.py --in-results-dir ./test_output/ --out-results-file /test_output/clickhouse_sql_test_results.tsv --out-status-file /test_output/clickhouse_sql_check_status.tsv || echo -e "failure\tCannot parse results" > /test_output/clickhouse_sql_check_status.tsv
          cd ..
          rm -rf ci_clickhouse_sql
          # not run test with optimizer enable at the moment
          # echo "Running test with optimizer"
          # ./clickhouse-test --stop --hung-check --jobs 1 --order asc --print-time --client-option 'enable_optimizer=1' 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
      - name: After test
        continue-on-error: true
        run: |

@@ -30,7 +30,7 @@
    <keep_alive_timeout>3</keep_alive_timeout>

    <cnch_type>server</cnch_type>
-   <listen_host>ip_address_replace_me</listen_host>
+   <listen_host>0.0.0.0</listen_host>

    <!-- gRPC protocol (see src/Server/grpc_protos/clickhouse_grpc.proto for the API) -->
    <!-- <grpc_port>9100</grpc_port> -->

@@ -28,7 +28,7 @@
    <keep_alive_timeout>3</keep_alive_timeout>

    <cnch_type>worker</cnch_type>
-   <listen_host>ip_address_replace_me</listen_host>
+   <listen_host>0.0.0.0</listen_host>

    <!-- gRPC protocol (see src/Server/grpc_protos/clickhouse_grpc.proto for the API) -->
    <!-- <grpc_port>9100</grpc_port> -->

@@ -28,7 +28,7 @@
    <keep_alive_timeout>3</keep_alive_timeout>

    <cnch_type>worker</cnch_type>
-   <listen_host>ip_address_replace_me</listen_host>
+   <listen_host>0.0.0.0</listen_host>

    <!-- gRPC protocol (see src/Server/grpc_protos/clickhouse_grpc.proto for the API) -->
    <!-- <grpc_port>9100</grpc_port> -->

@@ -47,7 +47,7 @@ def main(args):
        return_code = os.system(item)
        if return_code != 0:
            print('Failed sql is:', item)
-            raise f"error !"
+            raise Exception("error !")
        print('Table:', table_name_db_name, 'created')

    # TODO: import in parallel

@@ -70,9 +70,9 @@ namespace detail
}

// Unlike HashTableGrower always grows to power of 2.
-struct UniqCombinedHashTableGrower : public HashTableGrower<>
+struct UniqCombinedHashTableGrower : public HashTableGrowerWithPrecalculation<>
{
-    void increaseSize() { ++size_degree; }
+    void increaseSize() { increaseSizeDegree(1); }
};

template <typename Key, UInt8 K>

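A note on the change above (a sketch, not code from this commit; the stand-in names are hypothetical): size_degree is private in the new precalculating grower, so the uniqCombined override can no longer write ++size_degree directly and instead calls the public increaseSizeDegree(1), which also refreshes the cached mask and max-fill. The stand-in below shows the resulting strict-doubling behaviour.

#include <cstddef>
#include <cstdint>
#include <iostream>

// Simplified stand-in for HashTableGrowerWithPrecalculation: the degree is
// private, and every change refreshes the cached mask and max-fill values.
class Grower
{
    uint8_t size_degree = 8;
    size_t precalculated_mask = (1ULL << 8) - 1;
    size_t precalculated_max_fill = 1ULL << 7;

public:
    void increaseSizeDegree(uint8_t delta)
    {
        size_degree += delta;
        precalculated_mask = (1ULL << size_degree) - 1;
        precalculated_max_fill = 1ULL << (size_degree - 1);
    }
    // Default policy: grow fast (x4) while small, x2 once the degree is large.
    void increaseSize() { increaseSizeDegree(size_degree >= 23 ? 1 : 2); }
    size_t bufSize() const { return 1ULL << size_degree; }
};

// uniqCombined-style override: always double, i.e. one degree per growth step.
struct UniqGrower : Grower
{
    void increaseSize() { increaseSizeDegree(1); }
};

int main()
{
    UniqGrower g;
    for (int i = 0; i < 3; ++i)
    {
        g.increaseSize();
        std::cout << g.bufSize() << '\n'; // 512, 1024, 2048 - strict doubling
    }
}
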
@@ -147,10 +147,10 @@ namespace
 * separately.
 */
template <typename Key, typename Cell, typename Hash>
-class ReverseIndexHashTableBase : public HashTable<Key, Cell, Hash, HashTableGrower<>, HashTableAllocator>
+class ReverseIndexHashTableBase : public HashTable<Key, Cell, Hash, HashTableGrowerWithPrecalculation<>, HashTableAllocator>
{
    using State = typename Cell::State;
-    using Base = HashTable<Key, Cell, Hash, HashTableGrower<>, HashTableAllocator>;
+    using Base = HashTable<Key, Cell, Hash, HashTableGrowerWithPrecalculation<>, HashTableAllocator>;

public:
    using Base::Base;

@@ -14,14 +14,12 @@ struct ClearableHashMapCell : public ClearableHashTableCell<Key, HashMapCell<Key
    : Base::BaseCell(value_, state), Base::version(state.version) {}
};

-template
-<
+template <
    typename Key,
    typename Mapped,
    typename Hash = DefaultHash<Key>,
-    typename Grower = HashTableGrower<>,
-    typename Allocator = HashTableAllocator
->
+    typename Grower = HashTableGrowerWithPrecalculation<>,
+    typename Allocator = HashTableAllocator>
class ClearableHashMap : public HashTable<Key, ClearableHashMapCell<Key, Mapped, Hash>, Hash, Grower, Allocator>
{
public:

@@ -48,14 +48,13 @@ struct ClearableHashTableCell
    ClearableHashTableCell(const Key & key_, const State & state) : BaseCell(key_, state), version(state.version) {}
};

-template
-<
+template <
    typename Key,
    typename Hash = DefaultHash<Key>,
-    typename Grower = HashTableGrower<>,
-    typename Allocator = HashTableAllocator
->
-class ClearableHashSet : public HashTable<Key, ClearableHashTableCell<Key, HashTableCell<Key, Hash, ClearableHashSetState>>, Hash, Grower, Allocator>
+    typename Grower = HashTableGrowerWithPrecalculation<>,
+    typename Allocator = HashTableAllocator>
+class ClearableHashSet
+    : public HashTable<Key, ClearableHashTableCell<Key, HashTableCell<Key, Hash, ClearableHashSetState>>, Hash, Grower, Allocator>
{
public:
    using Base = HashTable<Key, ClearableHashTableCell<Key, HashTableCell<Key, Hash, ClearableHashSetState>>, Hash, Grower, Allocator>;

@@ -68,14 +67,17 @@ public:
    }
};

-template
-<
+template <
    typename Key,
    typename Hash = DefaultHash<Key>,
-    typename Grower = HashTableGrower<>,
-    typename Allocator = HashTableAllocator
->
-class ClearableHashSetWithSavedHash: public HashTable<Key, ClearableHashTableCell<Key, HashSetCellWithSavedHash<Key, Hash, ClearableHashSetState>>, Hash, Grower, Allocator>
+    typename Grower = HashTableGrowerWithPrecalculation<>,
+    typename Allocator = HashTableAllocator>
+class ClearableHashSetWithSavedHash : public HashTable<
+    Key,
+    ClearableHashTableCell<Key, HashSetCellWithSavedHash<Key, Hash, ClearableHashSetState>>,
+    Hash,
+    Grower,
+    Allocator>
{
public:
    void clear()

@@ -91,8 +93,4 @@ using ClearableHashSetWithStackMemory = ClearableHashSet<
    Hash,
    HashTableGrower<initial_size_degree>,
    HashTableAllocatorWithStackMemory<
-        (1ULL << initial_size_degree)
-        * sizeof(
-            ClearableHashTableCell<
-                Key,
-                HashTableCell<Key, Hash, ClearableHashSetState>>)>>;
+        (1ULL << initial_size_degree) * sizeof(ClearableHashTableCell<Key, HashTableCell<Key, Hash, ClearableHashSetState>>)>>;

@@ -174,7 +174,7 @@ template <
    typename Key,
    typename Cell,
    typename Hash = DefaultHash<Key>,
-    typename Grower = HashTableGrower<>,
+    typename Grower = HashTableGrowerWithPrecalculation<>,
    typename Allocator = HashTableAllocator>
class HashMapTable : public HashTable<Key, Cell, Hash, Grower, Allocator>
{

@@ -282,7 +282,7 @@ template <
    typename Key,
    typename Mapped,
    typename Hash = DefaultHash<Key>,
-    typename Grower = HashTableGrower<>,
+    typename Grower = HashTableGrowerWithPrecalculation<>,
    typename Allocator = HashTableAllocator>
using HashMap = HashMapTable<Key, HashMapCell<Key, Mapped, Hash>, Hash, Grower, Allocator>;

@@ -291,7 +291,7 @@ template <
    typename Key,
    typename Mapped,
    typename Hash = DefaultHash<Key>,
-    typename Grower = HashTableGrower<>,
+    typename Grower = HashTableGrowerWithPrecalculation<>,
    typename Allocator = HashTableAllocator>
using HashMapWithSavedHash = HashMapTable<Key, HashMapCellWithSavedHash<Key, Mapped, Hash>, Hash, Grower, Allocator>;

@@ -25,14 +25,12 @@ namespace ErrorCodes
 */


-template
-<
+template <
    typename Key,
    typename TCell,
    typename Hash = DefaultHash<Key>,
-    typename Grower = HashTableGrower<>,
-    typename Allocator = HashTableAllocator
->
+    typename Grower = HashTableGrowerWithPrecalculation<>,
+    typename Allocator = HashTableAllocator>
class HashSetTable : public HashTable<Key, TCell, Hash, Grower, Allocator>
{
public:

@@ -134,13 +132,11 @@ struct HashSetCellWithSavedHash : public HashTableCell<Key, Hash, TState>
    size_t getHash(const Hash & /*hash_function*/) const { return saved_hash; }
};

-template
-<
+template <
    typename Key,
    typename Hash = DefaultHash<Key>,
-    typename Grower = HashTableGrower<>,
-    typename Allocator = HashTableAllocator
->
+    typename Grower = HashTableGrowerWithPrecalculation<>,
+    typename Allocator = HashTableAllocator>
using HashSet = HashSetTable<Key, HashTableCell<Key, Hash>, Hash, Grower, Allocator>;

template <

@@ -159,13 +155,11 @@ using HashSetWithStackMemory = HashSet<
    (1ULL << initial_size_degree)
    * sizeof(HashTableCell<Key, Hash>)>>;

-template
-<
+template <
    typename Key,
    typename Hash = DefaultHash<Key>,
-    typename Grower = HashTableGrower<>,
-    typename Allocator = HashTableAllocator
->
+    typename Grower = HashTableGrowerWithPrecalculation<>,
+    typename Allocator = HashTableAllocator>
using HashSetWithSavedHash = HashSetTable<Key, HashSetCellWithSavedHash<Key, Hash>, Hash, Grower, Allocator>;

template <typename Key, typename Hash, size_t initial_size_degree>

@@ -249,6 +249,7 @@ void insertSetMapped(MappedType & dest, const ValueType & src) { dest = src.seco


/** Determines the size of the hash table, and when and how much it should be resized.
  * Has very small state (one UInt8) and useful for Set-s allocated in automatic memory (see uniqExact as an example).
  */
template <size_t initial_size_degree = 8>
struct HashTableGrower

@@ -298,6 +299,68 @@ struct HashTableGrower
    }
};

+/** Determines the size of the hash table, and when and how much it should be resized.
+  * This structure is aligned to cache line boundary and also occupies it all.
+  * Precalculates some values to speed up lookups and insertion into the HashTable (and thus has bigger memory footprint than HashTableGrower).
+  */
+template <size_t initial_size_degree = 8>
+class alignas(64) HashTableGrowerWithPrecalculation
+{
+    /// The state of this structure is enough to get the buffer size of the hash table.
+
+    UInt8 size_degree = initial_size_degree;
+    size_t precalculated_mask = (1ULL << initial_size_degree) - 1;
+    size_t precalculated_max_fill = 1ULL << (initial_size_degree - 1);
+
+public:
+    UInt8 sizeDegree() const { return size_degree; }
+
+    void increaseSizeDegree(UInt8 delta)
+    {
+        size_degree += delta;
+        precalculated_mask = (1ULL << size_degree) - 1;
+        precalculated_max_fill = 1ULL << (size_degree - 1);
+    }
+
+    static constexpr auto initial_count = 1ULL << initial_size_degree;
+
+    /// If collision resolution chains are contiguous, we can implement erase operation by moving the elements.
+    static constexpr auto performs_linear_probing_with_single_step = true;
+
+    /// The size of the hash table in the cells.
+    size_t bufSize() const { return 1ULL << size_degree; }
+
+    /// From the hash value, get the cell number in the hash table.
+    size_t place(size_t x) const { return x & precalculated_mask; }
+
+    /// The next cell in the collision resolution chain.
+    size_t next(size_t pos) const { return (pos + 1) & precalculated_mask; }
+
+    /// Whether the hash table is sufficiently full. You need to increase the size of the hash table, or remove something unnecessary from it.
+    bool overflow(size_t elems) const { return elems > precalculated_max_fill; }
+
+    /// Increase the size of the hash table.
+    void increaseSize() { increaseSizeDegree(size_degree >= 23 ? 1 : 2); }
+
+    /// Set the buffer size by the number of elements in the hash table. Used when deserializing a hash table.
+    void set(size_t num_elems)
+    {
+        size_degree = num_elems <= 1
+            ? initial_size_degree
+            : ((initial_size_degree > static_cast<size_t>(log2(num_elems - 1)) + 2)
+                ? initial_size_degree
+                : (static_cast<size_t>(log2(num_elems - 1)) + 2));
+        increaseSizeDegree(0);
+    }
+
+    void setBufSize(size_t buf_size_)
+    {
+        size_degree = static_cast<size_t>(log2(buf_size_ - 1) + 1);
+        increaseSizeDegree(0);
+    }
+};
+
+static_assert(sizeof(HashTableGrowerWithPrecalculation<>) == 64);
+
/** When used as a Grower, it turns a hash table into something like a lookup table.
  * It remains non-optimal - the cells store the keys.

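To make the new grower's arithmetic concrete, here is a condensed, self-contained sketch (an assumed simplification of the class above, not the full header): place() becomes a single AND against the cached mask, and set() picks the smallest degree that keeps a deserialized table at most half full. The traced numbers follow directly from the formulas in the diff.

#include <cassert>
#include <cmath>
#include <cstdint>

using UInt8 = uint8_t;

// Condensed copy of HashTableGrowerWithPrecalculation, enough to trace it.
template <size_t initial_size_degree = 8>
class alignas(64) Grower
{
    UInt8 size_degree = initial_size_degree;
    size_t precalculated_mask = (1ULL << initial_size_degree) - 1;
    size_t precalculated_max_fill = 1ULL << (initial_size_degree - 1);

public:
    size_t bufSize() const { return 1ULL << size_degree; }
    size_t place(size_t x) const { return x & precalculated_mask; } // one AND, no recomputed shift
    bool overflow(size_t elems) const { return elems > precalculated_max_fill; }

    void increaseSizeDegree(UInt8 delta)
    {
        size_degree += delta;
        precalculated_mask = (1ULL << size_degree) - 1;
        precalculated_max_fill = 1ULL << (size_degree - 1);
    }

    void set(size_t num_elems)
    {
        size_degree = num_elems <= 1
            ? initial_size_degree
            : ((initial_size_degree > static_cast<size_t>(std::log2(num_elems - 1)) + 2)
                ? initial_size_degree
                : (static_cast<size_t>(std::log2(num_elems - 1)) + 2));
        increaseSizeDegree(0); // refresh the cached mask / max-fill
    }
};

int main()
{
    Grower<> g;
    // 1000 elements: log2(999) ~ 9.96 -> degree 9 + 2 = 11, buffer 2048,
    // so the table is at most half full right after deserialization.
    g.set(1000);
    assert(g.bufSize() == 2048);
    assert(!g.overflow(1000));
    assert(g.place(0x12345) == (0x12345 & 2047));
}
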
@@ -78,20 +78,20 @@ struct LRUHashMapCellNodeTraits
};

template <typename TKey, typename TValue, typename Disposer, typename Hash, bool save_hash_in_cells>
-class LRUHashMapImpl :
-    private HashMapTable<
-        TKey,
-        LRUHashMapCell<TKey, TValue, Hash, save_hash_in_cells>,
-        Hash,
-        HashTableGrower<>,
-        HashTableAllocator>
+class LRUHashMapImpl : private HashMapTable<
+    TKey,
+    LRUHashMapCell<TKey, TValue, Hash, save_hash_in_cells>,
+    Hash,
+    HashTableGrowerWithPrecalculation<>,
+    HashTableAllocator>
{
    using Base = HashMapTable<
        TKey,
        LRUHashMapCell<TKey, TValue, Hash, save_hash_in_cells>,
        Hash,
-        HashTableGrower<>,
+        HashTableGrowerWithPrecalculation<>,
        HashTableAllocator>;

public:
    using Key = TKey;
    using Value = TValue;

@@ -150,10 +150,10 @@ public:
};

template <size_t initial_size_degree = 8>
-struct StringHashTableGrower : public HashTableGrower<initial_size_degree>
+struct StringHashTableGrower : public HashTableGrowerWithPrecalculation<initial_size_degree>
{
    // Smooth growing for string maps
-    void increaseSize() { this->size_degree += 1; }
+    void increaseSize() { this->increaseSizeDegree(1); }
};

template <typename Mapped>

@@ -15,13 +15,10 @@
 */

template <size_t initial_size_degree = 8>
-struct TwoLevelHashTableGrower : public HashTableGrower<initial_size_degree>
+struct TwoLevelHashTableGrower : public HashTableGrowerWithPrecalculation<initial_size_degree>
{
    /// Increase the size of the hash table.
-    void increaseSize()
-    {
-        this->size_degree += this->size_degree >= 15 ? 1 : 2;
-    }
+    void increaseSize() { this->increaseSizeDegree(this->sizeDegree() >= 15 ? 1 : 2); }
};

template

@@ -48,7 +48,7 @@ std::set<std::string> convertToSet(const HashTable & table)

TEST(HashTable, Insert)
{
-    using Cont = HashSet<int, DefaultHash<int>, HashTableGrower<1>>;
+    using Cont = HashSet<int, DefaultHash<int>, HashTableGrowerWithPrecalculation<1>>;

    Cont cont;

@@ -60,7 +60,7 @@ TEST(HashTable, Insert)

TEST(HashTable, Emplace)
{
-    using Cont = HashSet<int, DefaultHash<int>, HashTableGrower<1>>;
+    using Cont = HashSet<int, DefaultHash<int>, HashTableGrowerWithPrecalculation<1>>;

    Cont cont;

@@ -81,7 +81,7 @@ TEST(HashTable, Emplace)

TEST(HashTable, Lookup)
{
-    using Cont = HashSet<int, DefaultHash<int>, HashTableGrower<1>>;
+    using Cont = HashSet<int, DefaultHash<int>, HashTableGrowerWithPrecalculation<1>>;

    Cont cont;

@@ -100,7 +100,7 @@ TEST(HashTable, Lookup)

TEST(HashTable, Iteration)
{
-    using Cont = HashSet<int, DefaultHash<int>, HashTableGrower<1>>;
+    using Cont = HashSet<int, DefaultHash<int>, HashTableGrowerWithPrecalculation<1>>;

    Cont cont;

@@ -118,7 +118,7 @@ TEST(HashTable, Erase)
{
    {
        /// Check zero element deletion
-        using Cont = HashSet<int, DummyHash<int>, HashTableGrower<4>>;
+        using Cont = HashSet<int, DummyHash<int>, HashTableGrowerWithPrecalculation<4>>;
        Cont cont;

        cont.insert(0);

@@ -130,7 +130,7 @@ TEST(HashTable, Erase)
        ASSERT_TRUE(cont.find(0) == nullptr);
    }
    {
-        using Cont = HashSet<int, DummyHash<int>, HashTableGrower<4>>;
+        using Cont = HashSet<int, DummyHash<int>, HashTableGrowerWithPrecalculation<4>>;
        Cont cont;

        /// [.(1)..............] erase of (1).

@@ -143,7 +143,7 @@ TEST(HashTable, Erase)
        ASSERT_TRUE(cont.find(1) == nullptr);
    }
    {
-        using Cont = HashSet<int, DummyHash<int>, HashTableGrower<4>>;
+        using Cont = HashSet<int, DummyHash<int>, HashTableGrowerWithPrecalculation<4>>;
        Cont cont;

        /// [.(1)(2)(3)............] erase of (1) does not break search for (2) (3).

@@ -163,7 +163,7 @@ TEST(HashTable, Erase)
        ASSERT_EQ(cont.size(), 0);
    }
    {
-        using Cont = HashSet<int, DummyHash<int>, HashTableGrower<4>>;
+        using Cont = HashSet<int, DummyHash<int>, HashTableGrowerWithPrecalculation<4>>;
        Cont cont;

        /// [.(1)(17).............] erase of (1) breaks search for (17) because their natural position is 1.

@@ -175,7 +175,7 @@ TEST(HashTable, Erase)
        ASSERT_TRUE(cont.find(17) != nullptr && cont.find(17)->getKey() == 17);
    }
    {
-        using Cont = HashSet<int, DummyHash<int>, HashTableGrower<4>>;
+        using Cont = HashSet<int, DummyHash<int>, HashTableGrowerWithPrecalculation<4>>;
        Cont cont;

        /// [.(1)(2)(3)(17)...........] erase of (2) breaks search for (17) because their natural position is 1.

@@ -192,7 +192,7 @@ TEST(HashTable, Erase)
        ASSERT_TRUE(cont.find(17) != nullptr && cont.find(17)->getKey() == 17);
    }
    {
-        using Cont = HashSet<int, DummyHash<int>, HashTableGrower<4>>;
+        using Cont = HashSet<int, DummyHash<int>, HashTableGrowerWithPrecalculation<4>>;
        Cont cont;

        /// [(16)(30)............(14)(15)] erase of (16) breaks search for (30) because their natural position is 14.

@@ -208,7 +208,7 @@ TEST(HashTable, Erase)
        ASSERT_TRUE(cont.find(30) != nullptr && cont.find(30)->getKey() == 30);
    }
    {
-        using Cont = HashSet<int, DummyHash<int>, HashTableGrower<4>>;
+        using Cont = HashSet<int, DummyHash<int>, HashTableGrowerWithPrecalculation<4>>;
        Cont cont;

        /// [(16)(30)............(14)(15)] erase of (15) breaks search for (30) because their natural position is 14.

@@ -224,7 +224,7 @@ TEST(HashTable, Erase)
        ASSERT_TRUE(cont.find(30) != nullptr && cont.find(30)->getKey() == 30);
    }
    {
-        using Cont = HashSet<int, DefaultHash<int>, HashTableGrower<1>>;
+        using Cont = HashSet<int, DefaultHash<int>, HashTableGrowerWithPrecalculation<1>>;
        Cont cont;

        for (size_t i = 0; i < 5000; ++i)

@@ -260,7 +260,7 @@ TEST(HashTable, SerializationDeserialization)
{
    {
        /// Use dummy hash to make it reproducible if default hash implementation will be changed
-        using Cont = HashSet<int, DummyHash<int>, HashTableGrower<1>>;
+        using Cont = HashSet<int, DummyHash<int>, HashTableGrowerWithPrecalculation<1>>;

        Cont cont;

@@ -282,7 +282,7 @@ TEST(HashTable, SerializationDeserialization)
        ASSERT_EQ(convertToSet(cont), convertToSet(deserialized));
    }
    {
-        using Cont = HashSet<int, DefaultHash<int>, HashTableGrower<1>>;
+        using Cont = HashSet<int, DefaultHash<int>, HashTableGrowerWithPrecalculation<1>>;

        Cont cont;

@@ -300,7 +300,7 @@ TEST(HashTable, SerializationDeserialization)
        ASSERT_EQ(convertToSet(cont), convertToSet(deserialized));
    }
    {
-        using Cont = HashSet<int, DummyHash<int>, HashTableGrower<1>>;
+        using Cont = HashSet<int, DummyHash<int>, HashTableGrowerWithPrecalculation<1>>;
        Cont cont;

        WriteBufferFromOwnString wb;

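The erase tests above exercise the property the new grower advertises as performs_linear_probing_with_single_step: with single-step linear probing, erase can be implemented by moving later chain members back into the hole, so keys like (17) stay reachable after (1) is removed. A from-scratch sketch of that idea (a hypothetical TinySet, not the HashTable code itself):

#include <cassert>
#include <cstddef>
#include <optional>
#include <vector>

// Minimal open-addressing set with linear probing and erase-by-moving.
struct TinySet
{
    static constexpr size_t N = 16; // power of two, like bufSize()
    std::vector<std::optional<int>> buf{N};

    static size_t place(int key) { return static_cast<size_t>(key) & (N - 1); }

    void insert(int key)
    {
        size_t pos = place(key);
        while (buf[pos] && *buf[pos] != key)
            pos = (pos + 1) & (N - 1);
        buf[pos] = key;
    }

    bool find(int key) const
    {
        size_t pos = place(key);
        while (buf[pos])
        {
            if (*buf[pos] == key)
                return true;
            pos = (pos + 1) & (N - 1);
        }
        return false;
    }

    // Backward-shift erase: close the hole by moving later chain members
    // whose natural position would otherwise become unreachable.
    void erase(int key)
    {
        size_t pos = place(key);
        while (buf[pos] && *buf[pos] != key)
            pos = (pos + 1) & (N - 1);
        if (!buf[pos])
            return;
        size_t hole = pos;
        size_t next = (hole + 1) & (N - 1);
        while (buf[next])
        {
            size_t home = place(*buf[next]);
            // Move the element back iff its home is cyclically outside (hole, next].
            bool movable = (hole <= next) ? (home <= hole || home > next)
                                          : (home <= hole && home > next);
            if (movable)
            {
                buf[hole] = buf[next];
                buf[next].reset();
                hole = next;
            }
            next = (next + 1) & (N - 1);
        }
    }
};

int main()
{
    TinySet s;
    s.insert(1);
    s.insert(17); // collides with 1: natural position of both is 1
    s.erase(1);
    assert(s.find(17)); // 17 moved into the freed slot, so it stays findable
}
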
@@ -72,13 +72,13 @@ PlanNodeStatisticsPtr AggregateEstimator::estimate(PlanNodeStatisticsPtr & child
        name_to_type[item.name] = item.type;
    }
    const AggregateDescriptions & agg_descs = step.getAggregates();
-    for (auto & agg_desc : agg_descs)
+    for (const auto & agg_desc : agg_descs)
    {
        symbol_statistics[agg_desc.column_name]
            = AggregateEstimator::estimateAggFun(agg_desc.function, row_count, name_to_type[agg_desc.column_name]);
    }

-    return std::make_shared<PlanNodeStatistics>(row_count, symbol_statistics);
+    return std::make_shared<PlanNodeStatistics>(row_count, std::move(symbol_statistics));
}

PlanNodeStatisticsPtr AggregateEstimator::estimate(PlanNodeStatisticsPtr & child_stats, const MergingAggregatedStep & step)

@@ -104,7 +104,7 @@ PlanNodeStatisticsPtr AggregateEstimator::estimate(PlanNodeStatisticsPtr & child
            = AggregateEstimator::estimateAggFun(agg_desc.function, row_count, name_to_type[agg_desc.column_name]);
    }

-    return std::make_shared<PlanNodeStatistics>(row_count, symbol_statistics);
+    return std::make_shared<PlanNodeStatistics>(row_count, std::move(symbol_statistics));
}

PlanNodeStatisticsPtr AggregateEstimator::estimate(PlanNodeStatisticsPtr & child_stats, const DistinctStep & step)

@@ -125,7 +125,7 @@ PlanNodeStatisticsPtr AggregateEstimator::estimate(PlanNodeStatisticsPtr & child
        row_count = std::min(row_count, double(limit));
    }

-    return std::make_shared<PlanNodeStatistics>(row_count, symbol_statistics);
+    return std::make_shared<PlanNodeStatistics>(row_count, std::move(symbol_statistics));
}

SymbolStatisticsPtr AggregateEstimator::estimateAggFun(AggregateFunctionPtr fun, UInt64 row_count, DataTypePtr data_type)

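The estimator changes in this file and the ones below are one recurring pattern: local containers are handed to constructors as rvalues so the target steals their buckets instead of copying them. A self-contained sketch of the difference, with hypothetical stand-in types:

#include <cstdint>
#include <memory>
#include <string>
#include <unordered_map>
#include <utility>

// Hypothetical stand-ins for SymbolStatisticsPtr / PlanNodeStatistics.
using SymbolStatisticsPtr = std::shared_ptr<int>;

struct PlanNodeStatistics
{
    uint64_t row_count;
    std::unordered_map<std::string, SymbolStatisticsPtr> symbol_statistics;

    PlanNodeStatistics(uint64_t row_count_, std::unordered_map<std::string, SymbolStatisticsPtr> symbol_statistics_)
        : row_count(row_count_), symbol_statistics(std::move(symbol_statistics_))
    {
    }
};

std::shared_ptr<PlanNodeStatistics> build(uint64_t row_count)
{
    std::unordered_map<std::string, SymbolStatisticsPtr> symbol_statistics;
    symbol_statistics["x"] = std::make_shared<int>(42);

    // Before: the by-value map parameter is copy-constructed from the lvalue.
    // return std::make_shared<PlanNodeStatistics>(row_count, symbol_statistics);

    // After: the map is moved; the local is left empty but is never used again.
    return std::make_shared<PlanNodeStatistics>(row_count, std::move(symbol_statistics));
}

int main() { return build(10)->symbol_statistics.count("x") == 1 ? 0 : 1; }
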
@@ -34,7 +34,7 @@ PlanNodeStatisticsPtr AssignUniqueIdEstimator::estimate(PlanNodeStatisticsPtr &
    }
    symbol_statistics[unique_symbol] = unique_stats;

-    auto stats = std::make_shared<PlanNodeStatistics>(child_stats->getRowCount(), symbol_statistics);
+    auto stats = std::make_shared<PlanNodeStatistics>(child_stats->getRowCount(), std::move(symbol_statistics));
    return stats;
}

@@ -154,7 +154,6 @@ PlanNodeStatisticsPtr CardinalityVisitor::visitExceptStep(const ExceptStep &, Ca

PlanNodeStatisticsPtr CardinalityVisitor::visitExchangeStep(const ExchangeStep & step, CardinalityContext & context)
{
-    std::vector<PlanNodeStatisticsPtr> children_stats;
    PlanNodeStatisticsPtr stats = ExchangeEstimator::estimate(context.children_stats, step);
    return stats;
}

@@ -267,9 +266,9 @@ PlanNodeStatisticsPtr CardinalityVisitor::visitCTERefStep(const CTERefStep & ste

    auto & stats = result.value();
    std::unordered_map<String, SymbolStatisticsPtr> calculated_symbol_statistics;
-    for (auto & item : step.getOutputColumns())
+    for (const auto & item : step.getOutputColumns())
        calculated_symbol_statistics[item.first] = stats->getSymbolStatistics(item.second);
-    return std::make_shared<PlanNodeStatistics>(stats->getRowCount(), calculated_symbol_statistics);
+    return std::make_shared<PlanNodeStatistics>(stats->getRowCount(), std::move(calculated_symbol_statistics));
}

PlanNodeStatisticsPtr CardinalityVisitor::visitEnforceSingleRowStep(const EnforceSingleRowStep & step, CardinalityContext & context)

@@ -20,7 +20,7 @@ namespace DB
PlanNodeStatisticsPtr ExchangeEstimator::estimate(std::vector<PlanNodeStatisticsPtr> & children_stats, const ExchangeStep & step)
{
    PlanNodeStatisticsPtr output;
-    auto & out_to_input = step.getOutToInputs();
+    const auto & out_to_input = step.getOutToInputs();
    for (size_t i = 0; i < children_stats.size(); i++)
    {
        if (!children_stats.at(i))

@@ -47,14 +47,14 @@ PlanNodeStatisticsPtr ExchangeEstimator::mapToOutput(
{
    std::unordered_map<String, SymbolStatisticsPtr> output_symbol_statistics;

-    for (auto & symbol : out_to_input)
+    for (const auto & symbol : out_to_input)
    {
        String output_symbol = symbol.first;
-        auto & input_symbols = symbol.second;
+        const auto & input_symbols = symbol.second;
        output_symbol_statistics[output_symbol] = child_stats->getSymbolStatistics(input_symbols.at(index))->copy();
    }

-    return std::make_shared<PlanNodeStatistics>(child_stats->getRowCount(), output_symbol_statistics);
+    return std::make_shared<PlanNodeStatistics>(child_stats->getRowCount(), std::move(output_symbol_statistics));
}

}

@@ -119,7 +119,7 @@ double FilterEstimator::estimateFilterSelectivity(

ConstASTPtr tryGetIdentifier(ConstASTPtr node)
{
-    if (auto cast_func = node->as<ASTFunction>())
+    if (const auto * cast_func = node->as<ASTFunction>())
    {
        if (Poco::toLower(cast_func->name) == "cast")
        {

@@ -151,7 +151,7 @@ FilterEstimateResult FilterEstimator::estimateFilter(PlanNodeStatistics & stats,
    {
        return {1.0, {}};
    }
-    auto & function = predicate->as<const ASTFunction &>();
+    const auto & function = predicate->as<const ASTFunction &>();
    if (function.name == "and")
    {
        return estimateAndFilter(stats, predicate, context);

@@ -200,7 +200,7 @@ FilterEstimator::estimateAndFilter(PlanNodeStatistics & stats, ConstASTPtr & pre
        }
    }

-    return {selectivity, and_symbol_statistics};
+    return {selectivity, std::move(and_symbol_statistics)};
}

FilterEstimateResult

@@ -245,7 +245,7 @@ FilterEstimator::estimateOrFilter(PlanNodeStatistics & stats, ConstASTPtr & pred
            symbol_statistics[symbol] = first_value;
        }
    }
-    return {selectivity, symbol_statistics};
+    return {selectivity, std::move(symbol_statistics)};
}

FilterEstimateResult

@@ -262,7 +262,7 @@ FilterEstimator::estimateNotFilter(PlanNodeStatistics & stats, ConstASTPtr & pre
        SymbolStatisticsPtr origin = stats.getSymbolStatistics(symbol);
        not_symbol_statistics[symbol] = symbol_statistics.second->createNot(origin);
    }
-    return {1.0 - result.first, not_symbol_statistics};
+    return {1.0 - result.first, std::move(not_symbol_statistics)};
}

std::unordered_map<String, std::vector<SymbolStatisticsPtr>> FilterEstimator::combineSymbolStatistics(FilterEstimateResults & results)

@@ -293,7 +293,7 @@ std::unordered_map<String, std::vector<SymbolStatisticsPtr>> FilterEstimator::co
FilterEstimateResult
FilterEstimator::estimateSingleFilter(PlanNodeStatistics & stats, ConstASTPtr & predicate, FilterEstimatorContext & context)
{
-    auto & function = predicate->as<const ASTFunction &>();
+    const auto & function = predicate->as<const ASTFunction &>();
    if (function.name == "equals")
    {
        return estimateEqualityFilter(stats, predicate, context);

@@ -354,7 +354,7 @@ FilterEstimator::estimateSingleFilter(PlanNodeStatistics & stats, ConstASTPtr &
FilterEstimateResult
FilterEstimator::estimateEqualityFilter(PlanNodeStatistics & stats, ConstASTPtr & predicate, FilterEstimatorContext & context)
{
-    auto & function = predicate->as<const ASTFunction &>();
+    const auto & function = predicate->as<const ASTFunction &>();

    ConstASTPtr left = tryGetIdentifier(function.arguments->getChildren()[0]);
    std::optional<Field> field = context.calculateConstantExpression(function.arguments->getChildren()[1]);

@@ -366,7 +366,7 @@ FilterEstimator::estimateEqualityFilter(PlanNodeStatistics & stats, ConstASTPtr
        return {1.0, {}};
    }

-    auto & identifier = left->as<ASTIdentifier &>();
+    const auto & identifier = left->as<ASTIdentifier &>();
    String symbol = identifier.name();
    Field literal = *field;

@@ -397,7 +397,7 @@ FilterEstimator::estimateEqualityFilter(PlanNodeStatistics & stats, ConstASTPtr
        selectivity = symbol_statistics.estimateEqualFilter(value);
        std::unordered_map<std::string, SymbolStatisticsPtr> filtered_symbol_statistics
            = {{symbol, symbol_statistics.createEqualFilter(value)}};
-        return {selectivity, filtered_symbol_statistics};
+        return {selectivity, std::move(filtered_symbol_statistics)};
    }
    else
    {

@@ -419,7 +419,7 @@ FilterEstimator::estimateEqualityFilter(PlanNodeStatistics & stats, ConstASTPtr
        selectivity = symbol_statistics.estimateEqualFilter(value);
        std::unordered_map<std::string, SymbolStatisticsPtr> filtered_symbol_statistics
            = {{symbol, symbol_statistics.createEqualFilter(value)}};
-        return {selectivity, filtered_symbol_statistics};
+        return {selectivity, std::move(filtered_symbol_statistics)};
    }
    return {1.0, {}};
}

@@ -427,7 +427,7 @@ FilterEstimator::estimateEqualityFilter(PlanNodeStatistics & stats, ConstASTPtr
FilterEstimateResult
FilterEstimator::estimateNotEqualityFilter(PlanNodeStatistics & stats, ConstASTPtr & predicate, FilterEstimatorContext & context)
{
-    auto & function = predicate->as<ASTFunction &>();
+    const auto & function = predicate->as<ASTFunction &>();

    ConstASTPtr left = tryGetIdentifier(function.arguments->getChildren()[0]);
    std::optional<Field> field = context.calculateConstantExpression(function.arguments->getChildren()[1]);

@@ -439,7 +439,7 @@ FilterEstimator::estimateNotEqualityFilter(PlanNodeStatistics & stats, ConstASTP
        return {1.0, {}};
    }

-    auto & identifier = left->as<ASTIdentifier &>();
+    const auto & identifier = left->as<ASTIdentifier &>();
    String symbol = identifier.name();
    Field literal = *field;

@@ -469,7 +469,7 @@ FilterEstimator::estimateNotEqualityFilter(PlanNodeStatistics & stats, ConstASTP
        selectivity = symbol_statistics.estimateNotEqualFilter(value);
        std::unordered_map<std::string, SymbolStatisticsPtr> filtered_symbol_statistics
            = {{symbol, symbol_statistics.createNotEqualFilter(value)}};
-        return {selectivity, filtered_symbol_statistics};
+        return {selectivity, std::move(filtered_symbol_statistics)};
    }
    else
    {

@@ -488,7 +488,7 @@ FilterEstimator::estimateNotEqualityFilter(PlanNodeStatistics & stats, ConstASTP
        selectivity = symbol_statistics.estimateNotEqualFilter(value);
        std::unordered_map<std::string, SymbolStatisticsPtr> filtered_symbol_statistics
            = {{symbol, symbol_statistics.createNotEqualFilter(value)}};
-        return {selectivity, filtered_symbol_statistics};
+        return {selectivity, std::move(filtered_symbol_statistics)};
    }
    return {1.0, {}};
}

@@ -496,7 +496,7 @@ FilterEstimator::estimateNotEqualityFilter(PlanNodeStatistics & stats, ConstASTP
FilterEstimateResult
FilterEstimator::estimateRangeFilter(PlanNodeStatistics & stats, ConstASTPtr & predicate, FilterEstimatorContext & context)
{
-    auto & function = predicate->as<ASTFunction &>();
+    const auto & function = predicate->as<ASTFunction &>();

    ConstASTPtr left = tryGetIdentifier(function.arguments->getChildren()[0]);
    std::optional<Field> field = context.calculateConstantExpression(function.arguments->getChildren()[1]);

@@ -508,7 +508,7 @@ FilterEstimator::estimateRangeFilter(PlanNodeStatistics & stats, ConstASTPtr & p
        return {1.0, {}};
    }

-    auto & identifier = left->as<ASTIdentifier &>();
+    const auto & identifier = left->as<ASTIdentifier &>();
    String symbol = identifier.name();
    Field literal = *field;

@@ -560,8 +560,8 @@ FilterEstimator::estimateRangeFilter(PlanNodeStatistics & stats, ConstASTPtr & p
        return {1.0, {}};
    }

-    std::unordered_map<std::string, SymbolStatisticsPtr> filtered_symbol_statistics = {{symbol, filtered_statistics}};
-    return {selectivity, filtered_symbol_statistics};
+    std::unordered_map<std::string, SymbolStatisticsPtr> filtered_symbol_statistics = {{symbol, std::move(filtered_statistics)}};
+    return {selectivity, std::move(filtered_symbol_statistics)};
    }
    else if (symbol_statistics.isString())
    {

@@ -574,7 +574,7 @@ FilterEstimator::estimateRangeFilter(PlanNodeStatistics & stats, ConstASTPtr & p
FilterEstimateResult
FilterEstimator::estimateInFilter(PlanNodeStatistics & stats, ConstASTPtr & predicate, FilterEstimatorContext & context)
{
-    auto & function = predicate->as<ASTFunction &>();
+    const auto & function = predicate->as<ASTFunction &>();
    bool match = function.arguments->getChildren()[0]->as<ASTIdentifier>() && function.arguments->getChildren()[1]->as<ASTFunction>();
    if (!match)
    {

@@ -637,7 +637,7 @@ FilterEstimator::estimateInFilter(PlanNodeStatistics & stats, ConstASTPtr & pred
        }
        std::unordered_map<std::string, SymbolStatisticsPtr> filtered_symbol_statistics
            = {{symbol, symbol_statistics.createInFilter(values, has_null_value)}};
-        return {in_values_selectivity, filtered_symbol_statistics};
+        return {in_values_selectivity, std::move(filtered_symbol_statistics)};
    }
    else if (symbol_statistics.isString())
    {

@@ -659,7 +659,7 @@ FilterEstimator::estimateInFilter(PlanNodeStatistics & stats, ConstASTPtr & pred
        double in_values_selectivity = symbol_statistics.estimateInFilter(str_values, has_null_value, stats.getRowCount());
        std::unordered_map<std::string, SymbolStatisticsPtr> filtered_symbol_statistics
            = {{symbol, symbol_statistics.createInFilter(str_values, has_null_value)}};
-        return {in_values_selectivity, filtered_symbol_statistics};
+        return {in_values_selectivity, std::move(filtered_symbol_statistics)};
    }
    return {1.0, {}};
}

@@ -667,7 +667,7 @@ FilterEstimator::estimateInFilter(PlanNodeStatistics & stats, ConstASTPtr & pred
FilterEstimateResult
FilterEstimator::estimateNotInFilter(PlanNodeStatistics & stats, ConstASTPtr & predicate, FilterEstimatorContext & context)
{
-    auto & function = predicate->as<ASTFunction &>();
+    const auto & function = predicate->as<ASTFunction &>();
    bool match = function.arguments->getChildren()[0]->as<ASTIdentifier>() && function.arguments->getChildren()[1]->as<ASTFunction>();
    if (!match)
    {

@@ -729,7 +729,7 @@ FilterEstimator::estimateNotInFilter(PlanNodeStatistics & stats, ConstASTPtr & p
        }
        std::unordered_map<std::string, SymbolStatisticsPtr> filtered_symbol_statistics
            = {{symbol, symbol_statistics.createNotInFilter(values, has_null_value)}};
-        return {not_in_values_selectivity, filtered_symbol_statistics};
+        return {not_in_values_selectivity, std::move(filtered_symbol_statistics)};
    }
    else if (symbol_statistics.isString())
    {

@@ -754,14 +754,14 @@ FilterEstimator::estimateNotInFilter(PlanNodeStatistics & stats, ConstASTPtr & p
        double not_in_values_selectivity = symbol_statistics.estimateNotInFilter(str_values, has_null_value, stats.getRowCount());
        std::unordered_map<std::string, SymbolStatisticsPtr> filtered_symbol_statistics
            = {{symbol, symbol_statistics.createNotInFilter(str_values, has_null_value)}};
-        return {not_in_values_selectivity, filtered_symbol_statistics};
+        return {not_in_values_selectivity, std::move(filtered_symbol_statistics)};
    }
    return {1.0, {}};
}

FilterEstimateResult FilterEstimator::estimateNullFilter(PlanNodeStatistics & stats, ConstASTPtr & predicate, FilterEstimatorContext &)
{
-    auto & function = predicate->as<ASTFunction &>();
+    const auto & function = predicate->as<ASTFunction &>();
    ConstASTPtr left = tryGetIdentifier(function.arguments->getChildren()[0]);
    bool match = left->as<ASTIdentifier>();

@@ -770,7 +770,7 @@ FilterEstimateResult FilterEstimator::estimateNullFilter(PlanNodeStatistics & st
        return {1.0, {}};
    }

-    auto & identifier = left->as<ASTIdentifier &>();
+    const auto & identifier = left->as<ASTIdentifier &>();
    String symbol = identifier.name();

    SymbolStatistics & symbol_statistics = *stats.getSymbolStatistics(symbol);

@@ -785,18 +785,18 @@ FilterEstimateResult FilterEstimator::estimateNullFilter(PlanNodeStatistics & st
    {
        selectivity = symbol_statistics.estimateNullFilter(stats.getRowCount());
        std::unordered_map<std::string, SymbolStatisticsPtr> filtered_symbol_statistics = {{symbol, symbol_statistics.createNullFilter()}};
-        return {selectivity, filtered_symbol_statistics};
+        return {selectivity, std::move(filtered_symbol_statistics)};
    }

    // if data type is not nullable, null filter will return empty.
    selectivity = 0.0;
    std::unordered_map<std::string, SymbolStatisticsPtr> symbol_stats = {{symbol, symbol_statistics.createEmpty()}};
-    return {selectivity, symbol_stats};
+    return {selectivity, std::move(symbol_stats)};
}

FilterEstimateResult FilterEstimator::estimateNotNullFilter(PlanNodeStatistics & stats, ConstASTPtr & predicate, FilterEstimatorContext &)
{
-    auto & function = predicate->as<ASTFunction &>();
+    const auto & function = predicate->as<ASTFunction &>();

    ConstASTPtr left = tryGetIdentifier(function.arguments->getChildren()[0]);
    bool match = left->as<ASTIdentifier>();

@@ -806,7 +806,7 @@ FilterEstimateResult FilterEstimator::estimateNotNullFilter(PlanNodeStatistics &
        return {1.0, {}};
    }

-    auto & identifier = left->as<ASTIdentifier &>();
+    const auto & identifier = left->as<ASTIdentifier &>();
    String symbol = identifier.name();

    SymbolStatistics & symbol_statistics = *stats.getSymbolStatistics(symbol);

@@ -824,7 +824,7 @@ FilterEstimateResult FilterEstimator::estimateNotNullFilter(PlanNodeStatistics &

    selectivity = symbol_statistics.estimateNotNullFilter(stats.getRowCount());
    std::unordered_map<std::string, SymbolStatisticsPtr> filtered_symbol_statistics = {{symbol, symbol_statistics.createNotNullFilter()}};
-    return {selectivity, filtered_symbol_statistics};
+    return {selectivity, std::move(filtered_symbol_statistics)};
}

// TODO support dynamic sample for complex predicate @gouguiling

@@ -80,7 +80,7 @@ PlanNodeStatisticsPtr JoinEstimator::computeCardinality(
        join_output_statistics[item.first] = item.second->applySelectivity(left_rows, 1);
    }

-    return std::make_shared<PlanNodeStatistics>(join_card, join_output_statistics);
+    return std::make_shared<PlanNodeStatistics>(join_card, std::move(join_output_statistics));
}

// inner/left/right/full join

@@ -169,7 +169,7 @@ PlanNodeStatisticsPtr JoinEstimator::computeCardinality(
        if (pre_key_join_card <= join_card)
        {
            join_card = pre_key_join_card;
-            join_output_statistics = pre_key_join_output_statistics;
+            join_output_statistics.swap(pre_key_join_output_statistics);
        }
    }

@@ -36,7 +36,7 @@ SymbolStatisticsPtr PlanNodeStatistics::getSymbolStatistics(const String & symbo
UInt64 PlanNodeStatistics::getOutputSizeInBytes() const
{
    size_t row_size = 0;
-    for (auto & symbols : symbol_statistics)
+    for (const auto & symbols : symbol_statistics)
    {
        if (!symbols.second->isUnknown())
        {

@@ -52,7 +52,7 @@ String PlanNodeStatistics::toString() const
    details << "RowCount: " << row_count << "\\n";
    details << "DataSize: " << std::to_string(getOutputSizeInBytes()) << "\\n";
    details << "Symbol\\n";
-    for (auto & symbol : symbol_statistics)
+    for (const auto & symbol : symbol_statistics)
    {
        details << symbol.first << ": " << symbol.second->getNdv() << ", " << symbol.second->getMin() << ", " << symbol.second->getMax()
                << ", hist:" << symbol.second->getHistogram().getBuckets().size() << "\\n";

@@ -66,7 +66,7 @@ Poco::JSON::Object::Ptr PlanNodeStatistics::toJson() const
    json->set("rowCont", row_count);

    Poco::JSON::Array symbol_statistics_json_array;
-    for (auto & item : symbol_statistics)
+    for (const auto & item : symbol_statistics)
    {
        Poco::JSON::Object::Ptr symbol_statistics_json = new Poco::JSON::Object;
        symbol_statistics_json->set("symbol", item.first);

@@ -74,7 +74,7 @@ Poco::JSON::Object::Ptr PlanNodeStatistics::toJson() const
        symbol_statistics_json_array.add(symbol_statistics_json);
    }

-    json->set("symbolStatistics", symbol_statistics_json_array);
+    json->set("symbolStatistics", std::move(symbol_statistics_json_array));
    return json;
}

@@ -34,7 +34,7 @@ PlanNodeStatisticsPtr ProjectionEstimator::estimate(PlanNodeStatisticsPtr & chil

    std::unordered_map<String, SymbolStatisticsPtr> & symbol_statistics = project_stats->getSymbolStatistics();
    std::unordered_map<String, SymbolStatisticsPtr> calculated_symbol_statistics;
-    for (auto & assignment : step.getAssignments())
+    for (const auto & assignment : step.getAssignments())
    {
        auto result = ScalarStatsCalculator::estimate(
            assignment.second, name_to_type.at(assignment.first), project_stats->getRowCount(), symbol_statistics);

@@ -48,7 +48,7 @@ PlanNodeStatisticsPtr ProjectionEstimator::estimate(PlanNodeStatisticsPtr & chil
        }
    }

-    return std::make_shared<PlanNodeStatistics>(project_stats->getRowCount(), calculated_symbol_statistics);
+    return std::make_shared<PlanNodeStatistics>(project_stats->getRowCount(), std::move(calculated_symbol_statistics));
}

SymbolStatisticsPtr ScalarStatsCalculator::estimate(

@@ -66,7 +66,7 @@ SymbolStatisticsPtr ScalarStatsCalculator::visitNode(const ConstASTPtr &, std::u
SymbolStatisticsPtr
ScalarStatsCalculator::visitASTIdentifier(const ConstASTPtr & node, std::unordered_map<String, SymbolStatisticsPtr> & context)
{
-    auto & identifier = node->as<ASTIdentifier &>();
+    const auto & identifier = node->as<ASTIdentifier &>();
    return context[identifier.name()];
}

@@ -96,7 +96,7 @@ ScalarStatsCalculator::visitASTFunction(const ConstASTPtr & node, std::unordered
SymbolStatisticsPtr
ScalarStatsCalculator::visitASTLiteral(const ConstASTPtr & node, std::unordered_map<String, SymbolStatisticsPtr> & context)
{
-    auto literal = dynamic_cast<const ASTLiteral *>(node.get());
+    const auto * literal = dynamic_cast<const ASTLiteral *>(node.get());
    if (literal->value.isNull())
        return std::make_shared<SymbolStatistics>(1, 0, 0, 1);
    DataTypePtr tmp_type = type;

@@ -47,14 +47,14 @@ PlanNodeStatisticsPtr UnionEstimator::mapToOutput(
{
    std::unordered_map<String, SymbolStatisticsPtr> output_symbol_statistics;

-    for (auto & symbol : out_to_input)
+    for (const auto & symbol : out_to_input)
    {
        String output_symbol = symbol.first;
-        auto & input_symbols = symbol.second;
+        const auto & input_symbols = symbol.second;
        output_symbol_statistics[output_symbol] = child_stats.getSymbolStatistics(input_symbols.at(index))->copy();
    }

-    return std::make_shared<PlanNodeStatistics>(child_stats.getRowCount(), output_symbol_statistics);
+    return std::make_shared<PlanNodeStatistics>(child_stats.getRowCount(), std::move(output_symbol_statistics));
}

}

@@ -134,7 +134,7 @@ GroupExprPtr CascadesContext::initMemo(const PlanNodePtr & plan_node)
    {
        queue.push(child);
    }
-    if (auto read_step = dynamic_cast<const CTERefStep *>(node->getStep().get()))
+    if (const auto * read_step = dynamic_cast<const CTERefStep *>(node->getStep().get()))
    {
        if (!memo.containsCTEId(read_step->getId()))
        {

@@ -83,7 +83,7 @@ void Group::addExpression(const GroupExprPtr & expression, CascadesContext & con
        is_table_scans.emplace_back(context.getMemo().getGroupById(child)->isTableScan());
    }
    statistics = CardinalityEstimator::estimate(
-        expression->getStep(), context.getCTEInfo(), children_stats, context.getContext(), simple_children, is_table_scans);
+        expression->getStep(), context.getCTEInfo(), std::move(children_stats), context.getContext(), simple_children, is_table_scans);

    stats_derived = true;
}

@@ -97,7 +97,7 @@ void Group::addExpression(const GroupExprPtr & expression, CascadesContext & con
        {
            children.emplace_back(context.getMemo().getGroupById(child)->getEquivalences());
        }
-        equivalences = SymbolEquivalencesDeriver::deriveEquivalences(expression->getStep(), children);
+        equivalences = SymbolEquivalencesDeriver::deriveEquivalences(expression->getStep(), std::move(children));
    }
    else
    {

@@ -208,7 +208,7 @@ bool GroupExprBindingIterator::hasNext()
        }

        const auto & statistics = memo.getGroupById(group_expr->getGroupId())->getStatistics();
-        current_binding = PlanNodeBase::createPlanNode(context->nextNodeId(), group_expr->getStep(), children, statistics);
+        current_binding = PlanNodeBase::createPlanNode(context->nextNodeId(), group_expr->getStep(), std::move(children), statistics);
    }
}

@@ -222,7 +222,7 @@ CostWithCTEReferenceCounts PlanCostVisitor::visitPlanNode(PlanNodeBase & node, P
        .children_stats = children_stats, .worker_size = worker_size};
    cost += VisitorUtil::accept(node.getStep(), visitor, cost_context).getCost();
    plan_cost_map.emplace(node.getId(), cost);
-    return CostWithCTEReferenceCounts{cost, cte_reference_counts};
+    return CostWithCTEReferenceCounts{cost, std::move(cte_reference_counts)};
}

CostWithCTEReferenceCounts PlanCostVisitor::visitCTERefNode(CTERefNode & node, PlanCostMap & plan_cost_map)

@@ -74,7 +74,7 @@ EqualityInference EqualityInference::newInstance(const std::vector<ConstASTPtr>

        auto sub_expressions = SubExpressionExtractor::extract(expr);
        ConstASTSet sub_expressions_remove_itself;
-        for (auto & sub_expression : sub_expressions)
+        for (const auto & sub_expression : sub_expressions)
        {
            if (sub_expression != expr)
            {

@@ -82,7 +82,7 @@ EqualityInference EqualityInference::newInstance(const std::vector<ConstASTPtr>
            }
        }
        sub_expressions = sub_expressions_remove_itself;
-        for (auto & sub_expression : sub_expressions)
+        for (const auto & sub_expression : sub_expressions)
        {
            if (by_expressions.contains(sub_expression))
            {

@@ -92,18 +92,18 @@ EqualityInference EqualityInference::newInstance(const std::vector<ConstASTPtr>
        }
    }

-    std::unordered_map<ConstASTPtr, ConstASTSet, ASTEquality::ASTHash, ASTEquality::ASTEquals> equality_sets = makeEqualitySets(equalities);
+    auto equality_sets = makeEqualitySets(equalities);
    ConstASTMap canonical_mappings;

    for (auto & equality_set : equality_sets)
    {
-        for (auto & value : equality_set.second)
+        for (const auto & value : equality_set.second)
        {
            canonical_mappings[value] = equality_set.first;
        }
    }

-    return EqualityInference{equality_sets, canonical_mappings, derived_expressions};
+    return EqualityInference(equality_sets, std::move(canonical_mappings), {});
}

bool EqualityInference::isInferenceCandidate(const ConstASTPtr & predicate, ContextMutablePtr & context)

@@ -390,7 +390,7 @@ EqualityPartition EqualityInference::partitionedBy(std::set<String> scope)
            }
        }
    }
-    return EqualityPartition{scope_equalities, scope_complement_equalities, scope_straddling_equalities};
+    return EqualityPartition(std::move(scope_equalities), std::move(scope_complement_equalities), std::move(scope_straddling_equalities));
}

bool DisjointSet::findAndUnion(const ConstASTPtr & element_1, const ConstASTPtr & element_2)

@@ -65,7 +65,8 @@ TranslationResult SetOperationNodeTranslator::makeSetContainmentPlanForDistinct(
        aggregates.push_back(aggregate_desc);
    }

-    auto agg_step = std::make_shared<AggregatingStep>(union_node->getStep()->getOutputStream(), group_by_keys, aggregates, GroupingSetsParamsList{}, true, GroupingDescriptions{}, false, false);
+    auto agg_step = std::make_shared<AggregatingStep>(union_node->getStep()->getOutputStream(), std::move(group_by_keys), std::move(aggregates), GroupingSetsParamsList{}, true, GroupingDescriptions{}, false, false);

    PlanNodes children{union_node};
    PlanNodePtr agg_node = std::make_shared<AggregatingNode>(context.nextNodeId(), std::move(agg_step), children);

@@ -83,7 +83,7 @@ bool IterativeRewriter::exploreNode(PlanNodePtr & node, IterativeRewriterContext
    auto node_type = node->getStep()->getType();
    if (auto res = rules.find(node_type); res != rules.end())
    {
-        auto & rules_of_this_type = res->second;
+        const auto & rules_of_this_type = res->second;
        for (auto iter = rules_of_this_type.begin();
             // we can break the loop if the sub-plan has been entirely removed or the node type has been changed
             node && node->getStep()->getType() == node_type && iter != rules_of_this_type.end();

@@ -85,7 +85,7 @@ JoinGraph JoinGraph::withJoinGraph(
    }

    bool is_contains_cross_join = contains_cross_join || contains_cross_join_;
-    return JoinGraph{nodes_merged, edges_merged, filters_merged, new_root, is_contains_cross_join, new_original_node};
+    return JoinGraph{std::move(nodes_merged), std::move(edges_merged), std::move(filters_merged), new_root, is_contains_cross_join, std::move(new_original_node)};
}

String JoinGraph::toString() // NOLINT

@@ -13,6 +13,7 @@
  * limitations under the License.
  */

+#include <utility>
 #include <Optimizer/MergeSetOperation.h>

 #include <Interpreters/predicateExpressionsUtils.h>

@@ -88,13 +89,13 @@ PlanNodePtr SetOperationMerge::merge()

     if (node->getStep()->getType() == IQueryPlanStep::Type::Union)
     {
-        auto union_step = std::make_unique<UnionStep>(input_stream, output, output_to_inputs);
-        PlanNodePtr union_node = std::make_shared<UnionNode>(context.nextNodeId(), std::move(union_step), new_sources);
+        auto union_step = std::make_unique<UnionStep>(std::move(input_stream), std::move(output), std::move(output_to_inputs));
+        PlanNodePtr union_node = std::make_shared<UnionNode>(context.nextNodeId(), std::move(union_step), std::move(new_sources));
         return union_node;
     }

-    auto intersect_step = std::make_unique<IntersectStep>(input_stream, output, output_to_inputs, result_is_distinct);
-    PlanNodePtr intersect_node = std::make_shared<IntersectNode>(context.nextNodeId(), std::move(intersect_step), new_sources);
+    auto intersect_step = std::make_unique<IntersectStep>(std::move(input_stream), std::move(output), std::move(output_to_inputs), result_is_distinct);
+    PlanNodePtr intersect_node = std::make_shared<IntersectNode>(context.nextNodeId(), std::move(intersect_step), std::move(new_sources));
     return intersect_node;
 }

@@ -136,20 +137,20 @@ PlanNodePtr SetOperationMerge::mergeFirstSource()

     if (node->getStep()->getType() == IQueryPlanStep::Type::Union)
     {
-        auto union_step = std::make_unique<UnionStep>(input_stream, output, false);
-        PlanNodePtr union_node = std::make_shared<UnionNode>(context.nextNodeId(), std::move(union_step), new_sources);
+        auto union_step = std::make_unique<UnionStep>(std::move(input_stream), std::move(output), false);
+        PlanNodePtr union_node = std::make_shared<UnionNode>(context.nextNodeId(), std::move(union_step), std::move(new_sources));
         return union_node;
     }
     if (node->getStep()->getType() == IQueryPlanStep::Type::Intersect)
     {
-        auto intersect_step = std::make_unique<IntersectStep>(input_stream, output, merged_quantifier.value());
-        PlanNodePtr intersect_node = std::make_shared<IntersectNode>(context.nextNodeId(), std::move(intersect_step), new_sources);
+        auto intersect_step = std::make_unique<IntersectStep>(std::move(input_stream), std::move(output), merged_quantifier.value());
+        PlanNodePtr intersect_node = std::make_shared<IntersectNode>(context.nextNodeId(), std::move(intersect_step), std::move(new_sources));
         return intersect_node;
     }
     if (node->getStep()->getType() == IQueryPlanStep::Type::Except)
     {
         auto except_step = std::make_unique<ExceptStep>(input_stream, output, merged_quantifier.value());
-        PlanNodePtr except_node = std::make_shared<ExceptNode>(context.nextNodeId(), std::move(except_step), new_sources);
+        PlanNodePtr except_node = std::make_shared<ExceptNode>(context.nextNodeId(), std::move(except_step), std::move(new_sources));
         return except_node;
     }
     return nullptr;
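The SetOperationMerge hunks all follow one pattern: where a step constructor takes its containers by value, a caller that is finished with a local can std::move it in and skip a deep copy; each branch returns immediately, so nothing reads the moved-from locals afterwards. A minimal sketch of the pattern, with stand-in types (UnionStepLike and main are illustrative, not the real ByConity API):

#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Stand-in for a plan step that stores its inputs. Taking the vector by
// value lets the caller choose between copying (pass an lvalue) and
// moving (pass std::move(lvalue) or a temporary).
struct UnionStepLike
{
    std::vector<std::string> inputs;
    explicit UnionStepLike(std::vector<std::string> inputs_) : inputs(std::move(inputs_)) {}
};

int main()
{
    std::vector<std::string> streams{"left", "right"};
    // The local is dead after this point, so moving avoids one deep copy
    // of every element; before the change the lvalue was copied.
    UnionStepLike step(std::move(streams));
    std::cout << step.inputs.size() << " inputs\n"; // prints "2 inputs"
}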
@@ -156,7 +156,7 @@ Property DeriverVisitor::visitFilterStep(const FilterStep &, DeriverContext & co
 Property DeriverVisitor::visitJoinStep(const JoinStep & step, DeriverContext & context)
 {
     std::unordered_map<String, String> identities;
-    for (auto & item : step.getOutputStream().header)
+    for (const auto & item : step.getOutputStream().header)
     {
         identities[item.name] = item.name;
     }

@@ -68,7 +68,7 @@ Property PropertyMatcher::compatibleCommonRequiredProperty(const std::unordered_
     const auto & node_partition = res.getNodePartitioning();
     const auto handle = node_partition.getPartitioningHandle();
     std::unordered_set<String> columns_set;
-    for (auto & item : node_partition.getPartitioningColumns())
+    for (const auto & item : node_partition.getPartitioningColumns())
         columns_set.emplace(item);

     for (; it != required_properties.end(); ++it)

@@ -85,7 +85,7 @@ Property PropertyMatcher::compatibleCommonRequiredProperty(const std::unordered_
     if (partition_handle == Partitioning::Handle::FIXED_HASH)
     {
         std::unordered_set<String> intersection;
-        auto & partition_columns = it->getNodePartitioning().getPartitioningColumns();
+        const auto & partition_columns = it->getNodePartitioning().getPartitioningColumns();
         std::copy_if(
             partition_columns.begin(),
             partition_columns.end(),

@@ -100,9 +100,7 @@ Property PropertyMatcher::compatibleCommonRequiredProperty(const std::unordered_
     if (is_all_broadcast)
         return Property{Partitioning{Partitioning::Handle::FIXED_BROADCAST}};

-    Names partition_columns;
-    for (auto & item : columns_set)
-        partition_columns.emplace_back(item);
+    Names partition_columns{columns_set.begin(), columns_set.end()};

     // no need to consider require_handle / buckets / enforce_round_robin for required property
     return Property{Partitioning{handle, std::move(partition_columns)}};
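The last PropertyMatcher hunk collapses a manual copy loop into the iterator-range constructor that every standard container provides. Behavior is unchanged: iteration order over an unordered_set is unspecified either way. A short self-contained sketch (the names are illustrative; ByConity's Names is a vector of String):

#include <iostream>
#include <string>
#include <unordered_set>
#include <vector>

int main()
{
    std::unordered_set<std::string> columns_set{"a", "b", "c"};

    // One-liner replacement for an emplace_back loop: construct the
    // vector directly from the set's iterator range.
    std::vector<std::string> partition_columns{columns_set.begin(), columns_set.end()};

    std::cout << partition_columns.size() << "\n"; // 3, order unspecified
}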
@@ -169,7 +169,7 @@ std::optional<PlanNodePtr> PushProjectionThroughJoin::pushProjectionThroughJoin(
     else
     {
         auto left_expression_step
-            = std::make_shared<ProjectionStep>(join_left->getStep()->getOutputStream(), left_assignments, left_name_to_type);
+            = std::make_shared<ProjectionStep>(join_left->getStep()->getOutputStream(), std::move(left_assignments), std::move(left_name_to_type));
         PlanNodePtr left_expression_node
             = std::make_shared<ProjectionNode>(context->nextNodeId(), std::move(left_expression_step), PlanNodes{join_left});
         left_expression_step_inline = inlineProjections(left_expression_node, context);

@@ -184,7 +184,7 @@ std::optional<PlanNodePtr> PushProjectionThroughJoin::pushProjectionThroughJoin(
     else
     {
         auto right_expression_step
-            = std::make_shared<ProjectionStep>(join_right->getStep()->getOutputStream(), right_assignments, right_name_to_type);
+            = std::make_shared<ProjectionStep>(join_right->getStep()->getOutputStream(), std::move(right_assignments), std::move(right_name_to_type));
         PlanNodePtr right_expression_node
             = std::make_shared<ProjectionNode>(context->nextNodeId(), std::move(right_expression_step), PlanNodes{join_right});
         right_expression_step_inline = inlineProjections(right_expression_node, context);

@@ -251,11 +251,11 @@ std::set<String> PushProjectionThroughJoin::getJoinRequiredSymbols(JoinNode & no

     const auto & step = *node.getStep();

-    for (auto & key : step.getLeftKeys())
+    for (const auto & key : step.getLeftKeys())
     {
         join_symbols.emplace(key);
     }
-    for (auto & key : step.getRightKeys())
+    for (const auto & key : step.getRightKeys())
     {
         join_symbols.emplace(key);
     }
@@ -153,7 +153,7 @@ PlanNodePtr ColumnPruningVisitor::visitProjectionNode(ProjectionNode & node, Nam
         return child;

     auto expr_step = std::make_shared<ProjectionStep>(
-        child->getStep()->getOutputStream(), assignments, name_to_type, step->isFinalProject(), step->getDynamicFilters());
+        child->getStep()->getOutputStream(), std::move(assignments), std::move(name_to_type), step->isFinalProject(), step->getDynamicFilters());
     PlanNodes children{child};
     auto expr_node = ProjectionNode::createPlanNode(context->nextNodeId(), std::move(expr_step), children, node.getStatistics());
     return expr_node;

@@ -188,7 +188,7 @@ PlanNodePtr ColumnPruningVisitor::visitApplyNode(ApplyNode & node, NameSet & req
     }
     else if(ast && ast->as<ASTQuantifiedComparison>())
     {
-        auto & qc = ast->as<ASTQuantifiedComparison &>();
+        const auto & qc = ast->as<ASTQuantifiedComparison &>();
         ASTIdentifier & qc_left = qc.children[0]->as<ASTIdentifier &>();
         left_require.insert(qc_left.name());
     }

@@ -288,7 +288,7 @@ PlanNodePtr ColumnPruningVisitor::visitTableScanNode(TableScanNode & node, NameS
         column_names.emplace_back(item);

     auto read_step = std::make_shared<TableScanStep>(
-        context, step->getStorageID(), column_names, step->getQueryInfo(), step->getProcessedStage(), step->getMaxBlockSize());
+        context, step->getStorageID(), std::move(column_names), step->getQueryInfo(), step->getProcessedStage(), step->getMaxBlockSize());
     auto read_node = PlanNodeBase::createPlanNode(context->nextNodeId(), std::move(read_step), {}, node.getStatistics());
     return read_node;
 }

@@ -315,14 +315,14 @@ PlanNodePtr ColumnPruningVisitor::visitAggregatingNode(AggregatingNode & node, N
     auto child = VisitorUtil::accept(node.getChildren()[0], *this, child_require);
     if (aggs.empty() && step->getKeys().empty())
     {
-        auto [symbol, node] = createDummyPlanNode(context);
+        auto [symbol, node_] = createDummyPlanNode(context);
         (void) symbol;
         // require_.insert(symbol);
-        return node;
+        return node_;
     }

     auto agg_step = std::make_shared<AggregatingStep>(
-        child->getStep()->getOutputStream(), step->getKeys(), aggs, step->getGroupingSetsParams(), step->isFinal(), step->getGroupings()
+        child->getStep()->getOutputStream(), step->getKeys(), std::move(aggs), step->getGroupingSetsParams(), step->isFinal(), step->getGroupings()
         , false, step->shouldProduceResultsInOrderOfBucketNumber()
         // step->getHaving(),
         // step->getInteresteventsInfoList()

@@ -529,7 +529,7 @@ PlanNodePtr ColumnPruningVisitor::visitUnionNode(UnionNode & node, NameSet & req
         children.emplace_back(new_child);
     }

-    auto union_step = std::make_shared<UnionStep>(children_streams, output_stream, output_to_inputs, step->getMaxThreads(), step->isLocal());
+    auto union_step = std::make_shared<UnionStep>(std::move(children_streams), std::move(output_stream), std::move(output_to_inputs), step->getMaxThreads(), step->isLocal());
     auto union_node = UnionNode::createPlanNode(context->nextNodeId(), std::move(union_step), children, node.getStatistics());
     return union_node;
 }

@@ -565,10 +565,11 @@ PlanNodePtr ColumnPruningVisitor::visitExceptNode(ExceptNode & node, NameSet &)
         children.emplace_back(new_child);
     }

-    auto except_step = std::make_shared<ExceptStep>(children_streams, output_stream, step->isDistinct());
+    auto except_step = std::make_shared<ExceptStep>(std::move(children_streams), std::move(output_stream), step->isDistinct());
     auto except_node = ExceptNode::createPlanNode(context->nextNodeId(), std::move(except_step), children, node.getStatistics());
     return except_node;
 }

 PlanNodePtr ColumnPruningVisitor::visitIntersectNode(IntersectNode & node, NameSet &)
 {
     const auto * step = node.getStep().get();

@@ -601,7 +602,7 @@ PlanNodePtr ColumnPruningVisitor::visitIntersectNode(IntersectNode & node, NameS
         children.emplace_back(new_child);
     }

-    auto intersect_step = std::make_shared<IntersectStep>(children_streams, output_stream, step->isDistinct());
+    auto intersect_step = std::make_shared<IntersectStep>(std::move(children_streams), std::move(output_stream), step->isDistinct());
     auto intersect_node = IntersectNode::createPlanNode(context->nextNodeId(), std::move(intersect_step), children, node.getStatistics());
     return intersect_node;
 }

@@ -635,7 +636,7 @@ PlanNodePtr ColumnPruningVisitor::visitExchangeNode(ExchangeNode & node, NameSet
         input_streams.emplace_back(child->getStep()->getOutputStream());
     }

-    auto exchange_step = std::make_shared<ExchangeStep>(input_streams, step->getExchangeMode(), step->getSchema(), step->needKeepOrder());
+    auto exchange_step = std::make_shared<ExchangeStep>(std::move(input_streams), step->getExchangeMode(), step->getSchema(), step->needKeepOrder());
     return ExchangeNode::createPlanNode(context->nextNodeId(), std::move(exchange_step), children, node.getStatistics());
 }

@@ -665,7 +666,7 @@ PlanNodePtr ColumnPruningVisitor::visitCTERefNode(CTERefNode & node, NameSet & r
         output_columns.emplace(item);

     auto exchange_step
-        = std::make_shared<CTERefStep>(DataStream{result_columns}, with_step->getId(), output_columns, with_step->getFilter());
+        = std::make_shared<CTERefStep>(DataStream{std::move(result_columns)}, with_step->getId(), std::move(output_columns), with_step->getFilter());
     return CTERefNode::createPlanNode(context->nextNodeId(), std::move(exchange_step), {}, node.getStatistics());
 }
 }
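The visitAggregatingNode hunk is not a move-semantics change but a shadowing fix: the structured binding used to be named `node`, hiding the function parameter of the same name for the rest of the block. A tiny sketch of the hazard, with hypothetical names (createDummy and visit are illustrative only):

#include <string>
#include <utility>

std::pair<std::string, int> createDummy() { return {"sym", 42}; }

int visit(int node)
{
    if (node == 0)
        return 0;
    // Before the fix the binding was also named `node`, silently
    // shadowing the parameter and tripping -Wshadow builds; the
    // trailing underscore keeps both names visible.
    auto [symbol, node_] = createDummy();
    (void) symbol;
    return node_;
}

int main() { return visit(7) == 42 ? 0 : 1; }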
@@ -185,8 +185,9 @@ PlanNodePtr PredicateVisitor::visitFilterNode(FilterNode & node, PredicateContex

 PlanNodePtr PredicateVisitor::visitAggregatingNode(AggregatingNode & node, PredicateContext & predicate_context)
 {
-    const auto & step = * node.getStep();
-    auto & keys = step.getKeys();
+    const auto & step = *node.getStep();
+    const auto & keys = step.getKeys();

     // TODO: in case of grouping sets, we should be able to push the filters over grouping keys below the aggregation
     // and also preserve the filter above the aggregation if it has an empty grouping set

@@ -214,7 +215,7 @@ PlanNodePtr PredicateVisitor::visitAggregatingNode(AggregatingNode & node, Predi

     // Sort non-equality predicates by those that can be pushed down and those that cannot
     std::set<String> grouping_keys;
-    for (auto & key : keys)
+    for (const auto & key : keys)
     {
         grouping_keys.emplace(key);
     }

@@ -357,8 +358,7 @@ PlanNodePtr PredicateVisitor::visitJoinNode(JoinNode & node, PredicateContext &
     const DataStream & left_output = left->getStep()->getOutputStream();
     for (const auto & column : left_output.header)
     {
-        Assignment left_assignment{column.name, std::make_shared<ASTIdentifier>(column.name)};
-        left_assignments.emplace_back(left_assignment);
+        left_assignments.emplace_back(column.name, std::make_shared<ASTIdentifier>(column.name));
         left_types[column.name] = column.type;
     }

@@ -367,8 +367,7 @@ PlanNodePtr PredicateVisitor::visitJoinNode(JoinNode & node, PredicateContext &
     const DataStream & right_output = right->getStep()->getOutputStream();
     for (const auto & column : right_output.header)
     {
-        Assignment right_assignment{column.name, std::make_shared<ASTIdentifier>(column.name)};
-        right_assignments.emplace_back(right_assignment);
+        right_assignments.emplace_back(column.name, std::make_shared<ASTIdentifier>(column.name));
         right_types[column.name] = column.type;
     }

@@ -474,12 +473,12 @@ PlanNodePtr PredicateVisitor::visitJoinNode(JoinNode & node, PredicateContext &
     }

     auto left_source_expression_step
-        = std::make_shared<ProjectionStep>(left_source->getStep()->getOutputStream(), left_assignments, left_types);
+        = std::make_shared<ProjectionStep>(left_source->getStep()->getOutputStream(), std::move(left_assignments), std::move(left_types));
     auto left_source_expression_node
         = std::make_shared<ProjectionNode>(context->nextNodeId(), std::move(left_source_expression_step), PlanNodes{left_source});

     auto right_source_expression_step = std::make_shared<ProjectionStep>(
-        right_source->getStep()->getOutputStream(), right_assignments, right_types, false, dynamic_filters_results.dynamic_filters);
+        right_source->getStep()->getOutputStream(), std::move(right_assignments), std::move(right_types), false, std::move(dynamic_filters_results.dynamic_filters));
     auto right_source_expression_node
         = std::make_shared<ProjectionNode>(context->nextNodeId(), std::move(right_source_expression_step), PlanNodes{right_source});

@@ -495,11 +494,11 @@ PlanNodePtr PredicateVisitor::visitJoinNode(JoinNode & node, PredicateContext &
     NamesAndTypes output;
     for (const auto & item : left_header)
     {
-        output.emplace_back(NameAndTypePair{item.name, item.type});
+        output.emplace_back(item.name, item.type);
     }
     for (const auto & item : right_header)
     {
-        output.emplace_back(NameAndTypePair{item.name, item.type});
+        output.emplace_back(item.name, item.type);
     }

     // cast extracted join keys to super type

@@ -582,8 +581,8 @@ PlanNodePtr PredicateVisitor::visitJoinNode(JoinNode & node, PredicateContext &
         DataStream{.header = output},
         ASTTableJoin::Kind::Inner,
         ASTTableJoin::Strictness::All,
-        left_keys,
-        right_keys,
+        std::move(left_keys),
+        std::move(right_keys),
         new_join_filter,
         step->isHasUsing(),
         step->getRequireRightKeys(),

@@ -598,8 +597,8 @@ PlanNodePtr PredicateVisitor::visitJoinNode(JoinNode & node, PredicateContext &
         DataStream{.header = output},
         kind,
         step->getStrictness(),
-        left_keys,
-        right_keys,
+        std::move(left_keys),
+        std::move(right_keys),
         new_join_filter,
         step->isHasUsing(),
         step->getRequireRightKeys(),

@@ -658,7 +657,7 @@ PlanNodePtr PredicateVisitor::visitJoinNode(JoinNode & node, PredicateContext &
         output_types[column.name] = column.type;
     }
     auto output_expression_step
-        = std::make_shared<ProjectionStep>(output_node->getStep()->getOutputStream(), output_assignments, output_types);
+        = std::make_shared<ProjectionStep>(output_node->getStep()->getOutputStream(), std::move(output_assignments), std::move(output_types));
     auto output_expression_node
         = std::make_shared<ProjectionNode>(context->nextNodeId(), std::move(output_expression_step), PlanNodes{output_node});
     output_node = output_expression_node;

@@ -1091,7 +1090,7 @@ OuterJoinResult PredicateVisitor::processOuterJoin(
         = EqualityInference::newInstance(std::vector<ConstASTPtr>{inherited_predicate, outer_predicate}, context);

     EqualityPartition equality_partition = inherited_inference.partitionedBy(outer_symbols);
-    auto & scope_equalities = equality_partition.getScopeEqualities();
+    const auto & scope_equalities = equality_partition.getScopeEqualities();
     auto outer_only_inherited_equalities = PredicateUtils::combineConjuncts(scope_equalities);
     EqualityInference potential_null_symbol_inference = EqualityInference::newInstance(
         std::vector<ConstASTPtr>{outer_only_inherited_equalities, outer_predicate, inner_predicate, join_predicate}, context);

@@ -1104,7 +1103,7 @@ OuterJoinResult PredicateVisitor::processOuterJoin(

     EqualityPartition potential_null_symbol_inference_without_inner_inferred_partition =
         potential_null_symbol_inference_without_inner_inferred.partitionedBy(inner_symbols);
-    for (auto & conjunct : potential_null_symbol_inference_without_inner_inferred_partition.getScopeEqualities())
+    for (const auto & conjunct : potential_null_symbol_inference_without_inner_inferred_partition.getScopeEqualities())
     {
         inner_pushdown_conjuncts.emplace_back(conjunct);
     }

@@ -1112,29 +1111,29 @@ OuterJoinResult PredicateVisitor::processOuterJoin(
     // TODO: we can further improve simplifying the equalities by considering other relationships from the outer side
     EqualityPartition join_equality_partition = EqualityInference::newInstance(join_predicate, context).partitionedBy(inner_symbols);

-    for (auto & conjunct : join_equality_partition.getScopeEqualities())
+    for (const auto & conjunct : join_equality_partition.getScopeEqualities())
     {
         inner_pushdown_conjuncts.emplace_back(conjunct);
     }
-    for (auto & conjunct : join_equality_partition.getScopeComplementEqualities())
+    for (const auto & conjunct : join_equality_partition.getScopeComplementEqualities())
     {
         join_conjuncts.emplace_back(conjunct);
     }
-    for (auto & conjunct : join_equality_partition.getScopeStraddlingEqualities())
+    for (const auto & conjunct : join_equality_partition.getScopeStraddlingEqualities())
     {
         join_conjuncts.emplace_back(conjunct);
     }

     // Add the equalities from the inferences back in
-    for (auto & conjunct : equality_partition.getScopeEqualities())
+    for (const auto & conjunct : equality_partition.getScopeEqualities())
     {
         outer_pushdown_conjuncts.emplace_back(conjunct);
     }
-    for (auto & conjunct : equality_partition.getScopeComplementEqualities())
+    for (const auto & conjunct : equality_partition.getScopeComplementEqualities())
     {
         post_join_conjuncts.emplace_back(conjunct);
     }
-    for (auto & conjunct : equality_partition.getScopeStraddlingEqualities())
+    for (const auto & conjunct : equality_partition.getScopeStraddlingEqualities())
     {
         post_join_conjuncts.emplace_back(conjunct);
     }
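The many `auto &` to `const auto &` changes above are mostly const-correctness and readability, but the plain declaration in processOuterJoin also gains robustness: a non-const lvalue reference cannot bind to a temporary, so `auto &` breaks the moment a getter starts returning by value, while `const auto &` binds either way and extends the temporary's lifetime. A minimal sketch, assuming a by-value getter (getScopeEqualities here is a stand-in, not the real signature):

#include <string>
#include <vector>

// Stand-in accessor returning by value (illustrative).
std::vector<std::string> getScopeEqualities()
{
    return {"a = b", "b = c"};
}

int main()
{
    // auto & scope_equalities = getScopeEqualities();   // would not compile:
    // a non-const lvalue reference cannot bind to the returned temporary.
    const auto & scope_equalities = getScopeEqualities();
    // The temporary's lifetime is extended to match the reference.
    return scope_equalities.size() == 2 ? 0 : 1;
}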
@@ -96,7 +96,7 @@ PlanNodePtr CorrelatedScalarSubqueryVisitor::visitApplyNode(ApplyNode & node, Vo
     if (subquery_step_ptr->getType() == IQueryPlanStep::Type::Aggregating)
     {
         const auto & step = dynamic_cast<const AggregatingStep &>(*subquery_step_ptr);
-        auto & keys = step.getKeys();
+        const auto & keys = step.getKeys();
         if (keys.empty())
         {
             match = true;

@@ -117,7 +117,7 @@ PlanNodePtr CorrelatedScalarSubqueryVisitor::visitApplyNode(ApplyNode & node, Vo
     if (child_step_ptr->getType() == IQueryPlanStep::Type::Aggregating)
     {
         const auto & step = dynamic_cast<const AggregatingStep &>(*child_step_ptr);
-        auto & keys = step.getKeys();
+        const auto & keys = step.getKeys();
         if (keys.empty())
         {
             match = true;

@@ -317,21 +317,19 @@ PlanNodePtr UnCorrelatedScalarSubqueryVisitor::visitApplyNode(ApplyNode & node,
     const DataStream & right_data_stream = subquery_ptr->getStep()->getOutputStream();
     DataStreams streams = {left_data_stream, right_data_stream};

-    auto left_header = left_data_stream.header;
-    auto right_header = right_data_stream.header;
     NamesAndTypes output;
-    for (const auto & item : left_header)
+    for (const auto & item : left_data_stream.header)
     {
         output.emplace_back(NameAndTypePair{item.name, item.type});
     }
-    for (const auto & item : right_header)
+    for (const auto & item : right_data_stream.header)
     {
         output.emplace_back(NameAndTypePair{item.name, item.type});
     }

     auto join_step = std::make_shared<JoinStep>(
         streams,
-        DataStream{.header = output},
+        DataStream{.header = std::move(output)},
         ASTTableJoin::Kind::Cross,
         ASTTableJoin::Strictness::All,
         Names{},

@@ -1135,22 +1133,20 @@ PlanNodePtr UnCorrelatedExistsSubqueryVisitor::visitApplyNode(ApplyNode & node,

     DataStreams streams = {left_data_stream, right_data_stream};

-    auto left_header = left_data_stream.header;
-    auto right_header = right_data_stream.header;
     NamesAndTypes output;
-    for (const auto & item : left_header)
+    for (const auto & item : left_data_stream.header)
     {
         output.emplace_back(NameAndTypePair{item.name, item.type});
     }
-    for (const auto & item : right_header)
+    for (const auto & item : right_data_stream.header)
     {
         output.emplace_back(NameAndTypePair{item.name, item.type});
     }

     // step 3 : cross join, join rights side is a scalar value. (true/false)
     auto join_step = std::make_shared<JoinStep>(
-        streams,
-        DataStream{.header = output},
+        std::move(streams),
+        DataStream{.header = std::move(output)},
         ASTTableJoin::Kind::Cross,
         ASTTableJoin::Strictness::All,
         Names{},
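Several of the PredicateVisitor hunks further back replace "build a named temporary, then push it" with a single emplace_back that forwards the constructor arguments, so the element is constructed in place instead of constructed and then copied. A short sketch of the difference (NameAndType is a stand-in for the real NameAndTypePair):

#include <string>
#include <utility>
#include <vector>

using NameAndType = std::pair<std::string, std::string>;

int main()
{
    std::vector<NameAndType> output;

    // Before: a named temporary is built and then copied into the vector.
    NameAndType pair_copy{"x", "UInt64"};
    output.push_back(pair_copy);

    // After: emplace_back forwards the arguments straight to the pair
    // constructor, constructing the element in place.
    output.emplace_back("y", "String");

    return output.size() == 2 ? 0 : 1;
}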
@@ -49,7 +49,7 @@ static bool mustReplicate(const JoinStep & join_step)
 TransformResult SetJoinDistribution::transformImpl(PlanNodePtr node, const Captures &, RuleContext & context)
 {
     PlanNodes result;
-    auto join_node = dynamic_cast<JoinNode *>(node.get());
+    auto * join_node = dynamic_cast<JoinNode *>(node.get());
     if (!join_node)
         return {};

@@ -34,7 +34,7 @@ TransformResult DistinctToAggregate::transformImpl(PlanNodePtr node, const Captu
         return {};
     }

-    auto distinct_node = dynamic_cast<DistinctNode *>(node.get());
+    auto * distinct_node = dynamic_cast<DistinctNode *>(node.get());
     if (!distinct_node)
         return {};
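The `auto` to `auto *` changes here and in the pushdown rules below do not change the deduced type; they make the pointer-ness explicit at the declaration, which is what clang-tidy's readability-qualified-auto check asks for. A compact illustration with stand-in types (IStep and JoinStepLike are illustrative):

#include <memory>

struct IStep { virtual ~IStep() = default; };
struct JoinStepLike : IStep { int kind = 0; };

int main()
{
    std::unique_ptr<IStep> step = std::make_unique<JoinStepLike>();

    // `auto` already deduces JoinStepLike * here; spelling it `auto *`
    // makes the pointer visible and keeps the null-check idiom obvious.
    auto * join = dynamic_cast<JoinStepLike *>(step.get());
    if (!join)
        return 1;
    return join->kind;
}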
@@ -131,7 +131,7 @@ static MappedAggregationInfo createAggregationOverNull(const AggregatingStep * r
         = std::make_shared<AggregatingStep>(null_row->getStep()->getOutputStream(), Names{}, aggregations_over_null, GroupingSetsParamsList{}, true, GroupingDescriptions{}, false, false);
     auto aggregation_over_null_row = PlanNodeBase::createPlanNode(context.nextNodeId(), std::move(aggregation_over_null_row_step), {null_row});

-    return MappedAggregationInfo{aggregation_over_null_row, aggregations_symbol_mapping};
+    return MappedAggregationInfo{.aggregation_node = std::move(aggregation_over_null_row), .symbolMapping = std::move(aggregations_symbol_mapping)};
 }

 // When the aggregation is done after the join, there will be a null value that gets aggregated over

@@ -197,7 +197,7 @@ static PlanNodePtr coalesceWithNullAggregation(const AggregatingStep * aggregati
         name_to_type[symbol.name] = symbol.type;
     }

-    auto projection_step = std::make_shared<ProjectionStep>(cross_join->getStep()->getOutputStream(), assignments_builder, name_to_type);
+    auto projection_step = std::make_shared<ProjectionStep>(cross_join->getStep()->getOutputStream(), std::move(assignments_builder), std::move(name_to_type));
     return PlanNodeBase::createPlanNode(context.nextNodeId(), std::move(projection_step), {cross_join});
 }
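The MappedAggregationInfo return switches to designated initializers, the same C++20 feature the diff already uses for DataStream{.header = ...}: each member is named at the call site, so a reordered or newly added field cannot silently shift positional arguments, and the std::move calls avoid copying the locals. A minimal sketch under those assumptions (MappedInfo and its members are stand-ins, not the real struct):

#include <string>
#include <unordered_map>
#include <utility>

struct MappedInfo
{
    std::string aggregation_node;
    std::unordered_map<std::string, std::string> symbol_mapping;
};

MappedInfo build()
{
    std::string node = "agg_over_null";
    std::unordered_map<std::string, std::string> mapping{{"a", "a_null"}};
    // Designated initializers name each member explicitly; std::move
    // turns both member initializations into moves.
    return MappedInfo{.aggregation_node = std::move(node), .symbol_mapping = std::move(mapping)};
}

int main() { return build().symbol_mapping.size() == 1 ? 0 : 1; }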
@@ -37,9 +37,9 @@ PatternPtr PushLimitIntoDistinct::getPattern() const

 TransformResult PushLimitIntoDistinct::transformImpl(PlanNodePtr node, const Captures &, RuleContext &)
 {
-    auto limit_step = dynamic_cast<const LimitStep *>(node->getStep().get());
+    const auto * limit_step = dynamic_cast<const LimitStep *>(node->getStep().get());
     auto distinct = node->getChildren()[0];
-    auto distinct_step = dynamic_cast<const DistinctStep *>(distinct->getStep().get());
+    const auto * distinct_step = dynamic_cast<const DistinctStep *>(distinct->getStep().get());

     // when limit 0, we skip this rule since another rule will delete the whole node
     auto limit_value = limit_step->getLimit();

@@ -36,17 +36,17 @@ TransformResult PushFilterIntoTableScan::transformImpl(PlanNodePtr node, const C
 {
     auto table_scan = node->getChildren()[0];

-    auto filter_step = dynamic_cast<const FilterStep *>(node->getStep().get());
+    const auto * filter_step = dynamic_cast<const FilterStep *>(node->getStep().get());
     auto filter_conjuncts = PredicateUtils::extractConjuncts(filter_step->getFilter());

     auto pushdown_filters = extractPushDownFilter(filter_conjuncts, rule_context.context);
     if (!pushdown_filters.empty())
     {
         auto copy_table_step = table_scan->getStep()->copy(rule_context.context);
-        auto table_step = dynamic_cast<TableScanStep *>(copy_table_step.get());
+        auto * table_step = dynamic_cast<TableScanStep *>(copy_table_step.get());

         std::unordered_map<String, String> inv_alias;
-        for (auto & item : table_step->getColumnAlias())
+        for (const auto & item : table_step->getColumnAlias())
             inv_alias.emplace(item.second, item.first);

         auto mapper = SymbolMapper::symbolMapper(inv_alias);

@@ -106,7 +106,7 @@ std::vector<ConstASTPtr> PushFilterIntoTableScan::extractPushDownFilter(const st
 std::vector<ConstASTPtr> PushFilterIntoTableScan::removeStorageFilter(const std::vector<ConstASTPtr> & conjuncts)
 {
     std::vector<ConstASTPtr> remove_array_set_check;
-    for (auto & conjunct : conjuncts)
+    for (const auto & conjunct : conjuncts)
     {
         // Attention !!!
         // arraySetCheck must push into storage, it is not executable in engine.

@@ -132,12 +132,12 @@ PatternPtr PushLimitIntoTableScan::getPattern() const

 TransformResult PushLimitIntoTableScan::transformImpl(PlanNodePtr node, const Captures &, RuleContext & rule_context)
 {
-    auto limit_step = dynamic_cast<const LimitStep *>(node->getStep().get());
+    const auto * limit_step = dynamic_cast<const LimitStep *>(node->getStep().get());
     auto table_scan = node->getChildren()[0];

     auto copy_table_step = table_scan->getStep()->copy(rule_context.context);

-    auto table_step = dynamic_cast<TableScanStep *>(copy_table_step.get());
+    auto * table_step = dynamic_cast<TableScanStep *>(copy_table_step.get());
     bool applied = table_step->setLimit(limit_step->getLimit() + limit_step->getOffset(), rule_context.context);
     if (!applied)
         return {}; // repeat calls
@@ -65,6 +65,7 @@ static std::pair<Names, Names> createJoinCondition(UnionFind<String> & union_fin

     // create join key using the common equivalent symbols, each equivalent set create one join criteria.
     std::vector<std::pair<String, String>> criteria;
+    criteria.reserve(intersect_set.size());
     for (const auto & set : intersect_set)
     {
         criteria.emplace_back(

@@ -92,7 +92,7 @@ PlanNodePtr MagicSetRule::buildMagicSetAsFilterJoin(
     }
     filter_node = PlanNodeBase::createPlanNode(
         context->nextNodeId(),
-        std::make_shared<ProjectionStep>(DataStream{.header = names_and_types}, assignments, name_to_type),
+        std::make_shared<ProjectionStep>(DataStream{.header = names_and_types}, std::move(assignments), std::move(name_to_type)),
         PlanNodes{filter_node});
 }
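The reserve call added to createJoinCondition is the standard pre-sizing idiom: when the final element count is known before the loop, reserving once avoids the repeated reallocations (and element moves) that geometric growth would otherwise perform. A minimal sketch:

#include <string>
#include <utility>
#include <vector>

int main()
{
    std::vector<std::pair<std::string, std::string>> criteria;
    const std::size_t n = 100;

    // One allocation up front instead of O(log n) reallocations.
    criteria.reserve(n);
    for (std::size_t i = 0; i < n; ++i)
        criteria.emplace_back("left_" + std::to_string(i), "right_" + std::to_string(i));

    return criteria.size() == n ? 0 : 1;
}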
@@ -63,7 +63,7 @@ static std::optional<PlanNodePtr> createNewJoin(
     Context & context,
     NamesAndTypes output_stream = {})
 {
-    auto & left_keys = inner_join->getLeftKeys();
+    const auto & left_keys = inner_join->getLeftKeys();

     NameSet first_output;
     for (const auto & item : first->getStep()->getOutputStream().header)

@@ -71,7 +71,7 @@ static std::optional<PlanNodePtr> createNewJoin(
         first_output.insert(item.name);
     }

-    for (auto & left_key : left_keys)
+    for (const auto & left_key : left_keys)
     {
         // C only join A
         if (!first_output.contains(left_key))

@@ -210,7 +210,7 @@ TransformResult PullLeftJoinProjectionThroughInnerJoin::transformImpl(PlanNodePt
         name_to_type[item.name] = item.type;
     }

-    auto new_project_step = std::make_shared<ProjectionStep>(result->getStep()->getOutputStream(), assignments, name_to_type);
+    auto new_project_step = std::make_shared<ProjectionStep>(result->getStep()->getOutputStream(), std::move(assignments), std::move(name_to_type));

     return PlanNodeBase::createPlanNode(rule_context.context->nextNodeId(), std::move(new_project_step), {result});
 }

@@ -266,7 +266,7 @@ TransformResult PullLeftJoinFilterThroughInnerJoin::transformImpl(PlanNodePtr no
         assignments.emplace_back(item.name, std::make_shared<ASTIdentifier>(item.name));
         name_to_type[item.name] = item.type;
     }
-    auto new_project_step = std::make_shared<ProjectionStep>(new_filter_node->getStep()->getOutputStream(), assignments, name_to_type);
+    auto new_project_step = std::make_shared<ProjectionStep>(new_filter_node->getStep()->getOutputStream(), std::move(assignments), std::move(name_to_type));
     return PlanNodeBase::createPlanNode(rule_context.context->nextNodeId(), std::move(new_project_step), {new_filter_node});
 }
     return new_filter_node;
@@ -42,7 +42,7 @@ ConstASTPtr CommonPredicatesRewriter::visitNode(const ConstASTPtr & node, NodeCo

 ConstASTPtr CommonPredicatesRewriter::visitASTFunction(const ConstASTPtr & node, NodeContext & node_context)
 {
-    auto & fun = node->as<ASTFunction &>();
+    const auto & fun = node->as<ASTFunction &>();
     if (fun.name == PredicateConst::AND || fun.name == PredicateConst::OR)
     {
         std::vector<ConstASTPtr> extracted_predicates = PredicateUtils::extractPredicate(node);

@@ -50,18 +50,17 @@ ConstASTPtr CommonPredicatesRewriter::visitASTFunction(const ConstASTPtr & node,
         for (auto & predicate : extracted_predicates)
         {
             NodeContext child_context{.root = NodeContext::Root::NOT_ROOT_NODE, .context = node_context.context};
-            auto rewritten = process(predicate, child_context);
-            result.emplace_back(rewritten);
+            result.emplace_back(process(predicate, child_context));
         }
         ASTPtr combined_predicate = PredicateUtils::combinePredicates(fun.name, result);
-        auto combined_fun = combined_predicate->as<ASTFunction>();
+        const auto & combined_fun = combined_predicate->as<ASTFunction>();
         if (combined_fun == nullptr || (combined_fun->name != PredicateConst::AND && combined_fun->name != PredicateConst::OR))
         {
             return combined_predicate;
         }
         auto simplified = PredicateUtils::extractCommonPredicates(combined_predicate, node_context.context);
         // Prefer AND at the root if possible
-        auto simplified_fun = simplified->as<ASTFunction>();
+        const auto & simplified_fun = simplified->as<ASTFunction>();
         if (node_context.root == NodeContext::Root::ROOT_NODE && simplified_fun && simplified_fun->name == PredicateConst::OR)
         {
             return PredicateUtils::distributePredicate(simplified, node_context.context);

@@ -98,7 +97,7 @@ ConstASTPtr SwapPredicateRewriter::visitNode(const ConstASTPtr & node, Void & co

 ConstASTPtr SwapPredicateRewriter::visitASTFunction(const ConstASTPtr & predicate, Void & context)
 {
-    auto & function = predicate->as<ASTFunction &>();
+    const auto & function = predicate->as<ASTFunction &>();
     if (function.name == "and")
     {
         std::vector<ConstASTPtr> conjuncts = PredicateUtils::extractConjuncts(predicate);

@@ -64,7 +64,7 @@ public:

     Void visitTableScanNode(TableScanNode & node, Void &) override
     {
-        auto table_step = dynamic_cast<const TableScanStep *>(node.getStep().get());
+        const auto * table_step = dynamic_cast<const TableScanStep *>(node.getStep().get());
         for (const auto & item : table_step->getColumnAlias())
         {
             auto column_reference = std::make_shared<ASTTableColumnReference>(table_step->getStorage(), item.first);

@@ -107,7 +107,7 @@ public:

     ASTPtr visitASTIdentifier(ASTPtr & expr, Void & context) override
     {
-        auto & name = expr->as<ASTIdentifier &>().name();
+        const auto & name = expr->as<ASTIdentifier &>().name();

         if (expression_lineage.count(name))
             return expression_lineage.at(name)->clone();
@@ -13,41 +13,26 @@
  * limitations under the License.
  */

+#include <algorithm>
 #include <Optimizer/SymbolUtils.h>

 namespace DB
 {
 bool SymbolUtils::contains(std::vector<String> & symbols, String symbol)
 {
-    if (std::find(symbols.begin(), symbols.end(), symbol) != symbols.end())
-    {
-        return true;
-    }
-    return false;
+    return std::find(symbols.begin(), symbols.end(), symbol) != symbols.end();
 }

 bool SymbolUtils::containsAll(std::set<String> & left_symbols, std::set<String> & right_symbols)
 {
-    for (auto & symbol : right_symbols)
-    {
-        if (!left_symbols.contains(symbol))
-        {
-            return false;
-        }
-    }
-    return true;
+    return std::includes(left_symbols.begin(), left_symbols.end(), right_symbols.begin(), right_symbols.end());
 }

 bool SymbolUtils::containsAll(std::vector<String> & left_symbols, std::set<String> & right_symbols)
 {
-    for (auto & symbol : right_symbols)
-    {
-        if (std::find(left_symbols.begin(), left_symbols.end(), symbol) == left_symbols.end())
-        {
-            return false;
-        }
-    }
-    return true;
+    return std::all_of(right_symbols.begin(), right_symbols.end(), [&left_symbols](const String & symbol) {
+        return std::find(left_symbols.begin(), left_symbols.end(), symbol) != left_symbols.end();
+    });
 }

 }
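A note on why the std::includes rewrite is safe: std::includes requires both ranges to be sorted by the same comparator, and std::set iterates in sorted order, so the set/set overload is a valid linear-time subset test. The vector/set overload cannot use it (the vector is unsorted), which is why that one keeps a std::find inside std::all_of instead. A small self-contained check of the behavior:

#include <algorithm>
#include <cassert>
#include <set>
#include <string>

int main()
{
    std::set<std::string> left{"a", "b", "c", "d"};
    std::set<std::string> right{"b", "d"};

    // Both sets iterate in sorted order, satisfying std::includes'
    // precondition; this walks each range at most once.
    assert(std::includes(left.begin(), left.end(), right.begin(), right.end()));

    right.insert("z"); // not in left
    assert(!std::includes(left.begin(), left.end(), right.begin(), right.end()));
    return 0;
}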
@@ -25,7 +25,7 @@ std::set<std::string> SymbolsExtractor::extract(ConstASTPtr node)
     static SymbolVisitor visitor;
     SymbolVisitorContext context;
     ASTVisitorUtil::accept(node, visitor, context);
-    if (context.exclude_symbols.size() != 0)
+    if (!context.exclude_symbols.empty())
     {
         throw Exception("exclude_symbols should be null", ErrorCodes::LOGICAL_ERROR);
     }

@@ -65,7 +65,7 @@ Void SymbolVisitor::visitNode(const ConstASTPtr & node, SymbolVisitorContext & c

 Void SymbolVisitor::visitASTIdentifier(const ConstASTPtr & node, SymbolVisitorContext & context)
 {
-    auto & identifier = node->as<ASTIdentifier &>();
+    const auto & identifier = node->as<ASTIdentifier &>();
     if (!context.exclude_symbols.count(identifier.name()))
     {
         context.result.insert(identifier.name());

@@ -75,7 +75,7 @@ Void SymbolVisitor::visitASTIdentifier(const ConstASTPtr & node, SymbolVisitorCo

 Void SymbolVisitor::visitASTFunction(const ConstASTPtr & node, SymbolVisitorContext & context)
 {
-    auto & ast_func = node->as<const ASTFunction &>();
+    const auto & ast_func = node->as<const ASTFunction &>();
     if (ast_func.name == "lambda")
     {
         auto exclude_symbols = RequiredSourceColumnsMatcher::extractNamesFromLambda(ast_func);
@@ -90,6 +90,7 @@
 #include <Core/Types.h>
 #include <IO/ReadHelpers.h>
 #include <IO/WriteHelpers.h>
+#include "Parsers/IAST_fwd.h"

 #include <memory>

@@ -129,6 +130,19 @@ void serializeAST(const ASTPtr & ast, WriteBuffer & buf)
         writeBinary(false, buf);
 }

+void serializeAST(const ConstASTPtr & ast, WriteBuffer & buf)
+{
+    if (ast)
+    {
+        writeBinary(true, buf);
+        writeBinary(UInt8(ast->getType()), buf);
+        ast->serialize(buf);
+    }
+    else
+        writeBinary(false, buf);
+}
+

 ASTPtr deserializeAST(ReadBuffer & buf)
 {
     bool has_ast;

@@ -150,7 +164,7 @@ void serializeASTs(const ASTs & asts, WriteBuffer & buf)
 {
     writeVarUInt(asts.size(), buf);

-    for (auto & ast : asts)
+    for (const auto & ast : asts)
     {
         serializeAST(ast, buf);
     }

@@ -29,6 +29,8 @@ void serializeAST(const ASTPtr & ast, WriteBuffer & buf);

 void serializeAST(const IAST & ast, WriteBuffer & buf);

+void serializeAST(const ConstASTPtr & ast, WriteBuffer & buf);
+
 ASTPtr deserializeAST(ReadBuffer & buf);

 void serializeASTs(const ASTs & asts, WriteBuffer & buf);
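The point of the new serializeAST overload for ConstASTPtr: serialization only reads the tree, but the old API accepted only a mutable ASTPtr, forcing callers that hold a pointer-to-const to deep-clone the AST first. The FilterStep, JoinStep, and ProjectionStep hunks that follow drop exactly those clone() calls. A minimal sketch of the overload-resolution idea, with stand-in types (Node and the aliases are illustrative, not the real AST classes):

#include <iostream>
#include <memory>
#include <string>

struct Node { std::string text; };
using NodePtr = std::shared_ptr<Node>;
using ConstNodePtr = std::shared_ptr<const Node>;

void serialize(const NodePtr & ast) { std::cout << ast->text << '\n'; }

// The added overload: a pointer-to-const AST can be written directly,
// so callers holding ConstNodePtr no longer deep-copy just to serialize.
void serialize(const ConstNodePtr & ast) { std::cout << ast->text << '\n'; }

int main()
{
    ConstNodePtr filter = std::make_shared<Node>(Node{"x > 0"});
    serialize(filter);                          // new: serialize directly
    serialize(std::make_shared<Node>(*filter)); // old workaround: clone first
}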
@@ -167,7 +167,7 @@ void FilterStep::serialize(WriteBuffer & buf) const
         writeBinary(false, buf);
     if (filter)
     {
-        serializeAST(filter->clone(), buf);
+        serializeAST(filter, buf);
     }
     else
     {

@@ -265,7 +265,7 @@ void JoinStep::serialize(WriteBuffer & buf, bool with_output) const
     writeVectorBinary(left_keys, buf);
     writeVectorBinary(right_keys, buf);

-    serializeAST(filter->clone(), buf);
+    serializeAST(filter, buf);
     writeBinary(has_using, buf);

     writeBinary(require_right_keys.has_value(), buf);
@@ -178,6 +178,13 @@ ColumnPtr deserializeColumn(ReadBuffer & buf)
 }

+void serializeBlock(const Block & block, WriteBuffer & buf)
+{
+    BlockOutputStreamPtr block_out
+        = std::make_shared<NativeBlockOutputStream>(buf, ClickHouseRevision::getVersionRevision(), block);
+    block_out->write(block);
+}
+
 void serializeBlockWithData(const Block & block, WriteBuffer & buf)
 {
     BlockOutputStreamPtr block_out
         = std::make_shared<NativeBlockOutputStream>(buf, ClickHouseRevision::getVersionRevision(), block.cloneEmpty());

@@ -99,6 +99,7 @@ std::vector<T> deserializeItemVector(ReadBuffer & buf)
 }

+void serializeBlock(const Block & block, WriteBuffer & buf);
 void serializeBlockWithData(const Block & block, WriteBuffer & buf);
 Block deserializeBlock(ReadBuffer & buf);

 void serializeColumn(const ColumnPtr & column, const DataTypePtr & data_type, WriteBuffer & buf);
@@ -109,7 +109,7 @@ void ProjectionStep::serialize(WriteBuffer & buf) const
     for (const auto & item : assignments)
     {
         writeStringBinary(item.first, buf);
-        serializeAST(item.second->clone(), buf);
+        serializeAST(item.second, buf);
     }

     writeVarUInt(name_to_type.size(), buf);
@@ -184,7 +184,7 @@ public:
     TableHandler table_handler(table_info);
     table_handler.registerHandler(std::make_unique<RowCountHandler>(handler_context));

-    for (auto & col_desc : cols_desc)
+    for (const auto & col_desc : cols_desc)
     {
         table_handler.registerHandler(std::make_unique<FirstFullColumnHandler>(handler_context, col_desc));
     }

@@ -202,7 +202,7 @@ public:
     TableHandler table_handler(table_info);
     table_handler.registerHandler(std::make_unique<RowCountHandler>(handler_context));
     bool to_collect = false;
-    for (auto & col_desc : cols_desc)
+    for (const auto & col_desc : cols_desc)
     {
         auto & col_info = handler_context.columns_data.at(col_desc.name);
         if (std::llround(col_info.ndv_value_opt.value()) >= 2)
@@ -253,7 +253,7 @@ Histogram Histogram::createEqualFilter(double value) const
            }
        }
    }
-    return Histogram{new_buckets};
+    return Histogram{std::move(new_buckets)};
 }

 Histogram Histogram::createNotEqualFilter(double value) const

@@ -118,7 +118,7 @@ std::optional<PlanNodeStatisticsPtr> StatisticsCollector::toPlanNodeStatistics()
     auto table_row_count = table_stats.basic->getRowCount();
     result->updateRowCount(table_row_count);
     // whether to construct single bucket histogram from min/max if there is no histogram
-    for (auto & [col, stats] : columns_stats)
+    for (const auto & [col, stats] : columns_stats)
     {
         auto symbol = std::make_shared<SymbolStatistics>();
@@ -127,15 +127,15 @@ String StatsNdvBucketsExtendImpl<T>::serialize() const
     Protos::StatsNdvBucketsExtend pb;
     pb.set_bounds_blob(bounds_.serialize());

-    for (auto & count : counts_)
+    for (const auto & count : counts_)
     {
         pb.add_counts(count);
     }
-    for (auto & cpc : cpc_sketches_)
+    for (const auto & cpc : cpc_sketches_)
     {
         pb.add_cpc_sketch_blobs(cpc.serialize());
     }
-    for (auto & block_cpc : block_cpc_sketches_)
+    for (const auto & block_cpc : block_cpc_sketches_)
     {
         pb.add_block_cpc_sketch_blobs(block_cpc.serialize());
     }
@@ -340,9 +340,6 @@ def configure_testcase_args(args, case_file, suite_tmp_dir, stderr_file):
     return testcase_args

 def run_single_test(args, ext, server_logs_level, client_options, case_file, stdout_file, stderr_file):
-    # To help debug CI failure due to AttributeError
-    if type(args) is tuple:
-        print(args)
     client = args.testcase_client
     start_time = args.testcase_start_time
     database = args.testcase_database
@@ -0,0 +1,10 @@
+0	1
+1	\N
+2	21
+3	\N
+4	41
+5	\N
+6	61
+7	\N
+8	81
+9	\N

@@ -0,0 +1,2 @@
+SET enable_shuffle_with_order = 1;
+SELECT number, joined FROM system.numbers ANY LEFT JOIN (SELECT number * 2 AS number, number * 10 + 1 AS joined FROM system.numbers LIMIT 10) USING number LIMIT 10

@@ -0,0 +1,10 @@
+0	0	0
+1	\N	\N
+2	2	1
+3	\N	\N
+4	4	2
+5	\N	\N
+6	6	3
+7	\N	\N
+8	8	4
+9	\N	\N

@@ -0,0 +1,10 @@
+SET enable_shuffle_with_order = 1;
+SELECT a.*, b.* FROM
+(
+    SELECT number AS k FROM system.numbers LIMIT 10
+) AS a
+ANY LEFT JOIN
+(
+    SELECT number * 2 AS k, number AS joined FROM system.numbers LIMIT 10
+) AS b
+USING k;

@@ -0,0 +1,5 @@
+0	0	0
+2	2	1
+4	4	2
+6	6	3
+8	8	4
@@ -0,0 +1,10 @@
+SET enable_shuffle_with_order = 1;
+SELECT a.*, b.* FROM
+(
+    SELECT number AS k FROM system.numbers LIMIT 10
+) AS a
+ANY INNER JOIN
+(
+    SELECT number * 2 AS k, number AS joined FROM system.numbers LIMIT 10
+) AS b
+USING k;

@@ -0,0 +1,10 @@
+0	0	0
+0	0	1
+1	1	2
+1	1	3
+2	2	4
+2	2	5
+3	3	6
+3	3	7
+4	4	8
+4	4	9

@@ -0,0 +1,10 @@
+SET enable_shuffle_with_order = 1;
+SELECT a.*, b.* FROM
+(
+    SELECT number AS k FROM system.numbers LIMIT 10
+) AS a
+ALL INNER JOIN
+(
+    SELECT intDiv(number, 2) AS k, number AS joined FROM system.numbers LIMIT 10
+) AS b
+USING k;

@@ -0,0 +1,10 @@
+0	0	0	Hello
+1	0.5	\N	\N
+2	1	3	Hello
+3	1.5	\N	\N
+4	2	6	Hello
+5	2.5	\N	\N
+6	3	9	Hello
+7	3.5	\N	\N
+8	4	\N	\N
+9	4.5	\N	\N

@@ -0,0 +1,2 @@
+SET enable_shuffle_with_order = 1;
+SELECT number, number / 2 AS n, j1, j2 FROM system.numbers ANY LEFT JOIN (SELECT number / 3 AS n, number AS j1, 'Hello' AS j2 FROM system.numbers LIMIT 10) USING n LIMIT 10

@@ -0,0 +1,6 @@
+1	2	3	4
+2	3	4	5
+1	2	3	4
+2	3	4	5
+1	2	3	4
+2	3	4	5

@@ -0,0 +1,3 @@
+SELECT a,b,c,d FROM (SELECT 1 AS a,2 AS b, 3 AS c UNION ALL SELECT 2,3,4 ) ANY INNER JOIN (SELECT 1 AS a,2 AS b,4 AS d UNION ALL SELECT 2,3,5) USING (a) ORDER BY a,b,c,d ASC;
+SELECT a,b,c,d FROM (SELECT 1 AS a,2 AS b, 3 AS c UNION ALL SELECT 2,3,4 ) ALL LEFT JOIN (SELECT 1 AS a,2 AS b,4 AS d UNION ALL SELECT 2,3,5) USING (a) ORDER BY a,b,c,d ASC;
+SELECT a,b,c,d FROM (SELECT 1 AS a,2 AS b, 3 AS c UNION ALL SELECT 2,3,4 ) ALL LEFT JOIN (SELECT 1 AS a,2 AS b,4 AS d UNION ALL SELECT 2,3,5) USING a,b ORDER BY a,b,c,d ASC;

@@ -0,0 +1 @@
+1	abc

@@ -0,0 +1,14 @@
+SELECT
+    1 AS DomainID,
+    Domain
+FROM system.one
+ANY LEFT JOIN
+(
+    SELECT
+        1 AS DomainID,
+        'abc' AS Domain
+    UNION ALL
+    SELECT
+        2 AS DomainID,
+        'def' AS Domain
+) USING DomainID;

@@ -0,0 +1 @@
+1

@@ -0,0 +1 @@
+SELECT 1 AS k FROM system.one ANY LEFT JOIN (SELECT k FROM (SELECT 1 AS k, 2 AS x)) USING k;
@@ -0,0 +1,256 @@
+0	0	0
+1	1	1
+10	10	10
+100	100	100
+101	101	101
+102	102	102
+103	103	103
+104	104	104
+105	105	105
+106	106	106
+107	107	107
+108	108	108
+109	109	109
+11	11	11
+110	110	110
+111	111	111
+112	112	112
+113	113	113
+114	114	114
+115	115	115
+116	116	116
+117	117	117
+118	118	118
+119	119	119
+12	12	12
+120	120	120
+121	121	121
+122	122	122
+123	123	123
+124	124	124
+125	125	125
+126	126	126
+127	127	127
+128	128	128
+129	129	129
+13	13	13
+130	130	130
+131	131	131
+132	132	132
+133	133	133
+134	134	134
+135	135	135
+136	136	136
+137	137	137
+138	138	138
+139	139	139
+14	14	14
+140	140	140
+141	141	141
+142	142	142
+143	143	143
+144	144	144
+145	145	145
+146	146	146
+147	147	147
+148	148	148
+149	149	149
+15	15	15
+150	150	150
+151	151	151
+152	152	152
+153	153	153
+154	154	154
+155	155	155
+156	156	156
+157	157	157
+158	158	158
+159	159	159
+16	16	16
+160	160	160
+161	161	161
+162	162	162
+163	163	163
+164	164	164
+165	165	165
+166	166	166
+167	167	167
+168	168	168
+169	169	169
+17	17	17
+170	170	170
+171	171	171
+172	172	172
+173	173	173
+174	174	174
+175	175	175
+176	176	176
+177	177	177
+178	178	178
+179	179	179
+18	18	18
+180	180	180
+181	181	181
+182	182	182
+183	183	183
+184	184	184
+185	185	185
+186	186	186
+187	187	187
+188	188	188
+189	189	189
+19	19	19
+190	190	190
+191	191	191
+192	192	192
+193	193	193
+194	194	194
+195	195	195
+196	196	196
+197	197	197
+198	198	198
+199	199	199
+2	2	2
+20	20	20
+200	200	200
+201	201	201
+202	202	202
+203	203	203
+204	204	204
+205	205	205
+206	206	206
+207	207	207
+208	208	208
+209	209	209
+21	21	21
+210	210	210
+211	211	211
+212	212	212
+213	213	213
+214	214	214
+215	215	215
+216	216	216
+217	217	217
+218	218	218
+219	219	219
+22	22	22
+220	220	220
+221	221	221
+222	222	222
+223	223	223
+224	224	224
+225	225	225
+226	226	226
+227	227	227
+228	228	228
+229	229	229
+23	23	23
+230	230	230
+231	231	231
+232	232	232
+233	233	233
+234	234	234
+235	235	235
+236	236	236
+237	237	237
+238	238	238
+239	239	239
+24	24	24
+240	240	240
+241	241	241
+242	242	242
+243	243	243
+244	244	244
+245	245	245
+246	246	246
+247	247	247
+248	248	248
+249	249	249
+25	25	25
+250	250	250
+251	251	251
+252	252	252
+253	253	253
+254	254	254
+255	255	255
+26	26	26
+27	27	27
+28	28	28
+29	29	29
+3	3	3
+30	30	30
+31	31	31
+32	32	32
+33	33	33
+34	34	34
+35	35	35
+36	36	36
+37	37	37
+38	38	38
+39	39	39
+4	4	4
+40	40	40
+41	41	41
+42	42	42
+43	43	43
+44	44	44
+45	45	45
+46	46	46
+47	47	47
+48	48	48
+49	49	49
+5	5	5
+50	50	50
+51	51	51
+52	52	52
+53	53	53
+54	54	54
+55	55	55
+56	56	56
+57	57	57
+58	58	58
+59	59	59
+6	6	6
+60	60	60
+61	61	61
+62	62	62
+63	63	63
+64	64	64
+65	65	65
+66	66	66
+67	67	67
+68	68	68
+69	69	69
+7	7	7
+70	70	70
+71	71	71
+72	72	72
+73	73	73
+74	74	74
+75	75	75
+76	76	76
+77	77	77
+78	78	78
+79	79	79
+8	8	8
+80	80	80
+81	81	81
+82	82	82
+83	83	83
+84	84	84
+85	85	85
+86	86	86
+87	87	87
+88	88	88
+89	89	89
+9	9	9
+90	90	90
+91	91	91
+92	92	92
+93	93	93
+94	94	94
+95	95	95
+96	96	96
+97	97	97
+98	98	98
+99	99	99

@@ -0,0 +1,3 @@
+SET max_rows_to_read = 1000000;
+SET read_overflow_mode = 'break';
+SELECT concat(toString(number % 256 AS n), '') AS s, n, max(s) FROM system.numbers_mt GROUP BY s, n, n, n, n, n, n, n, n, n ORDER BY s, n;
@@ -0,0 +1,24 @@
+2015-01-01	['Hello','World']
+2015-01-01	Hello
+2015-01-01	World
+2015-01-01	Hello
+2015-01-01	['Hello','World']	[0,0]
+2015-01-01	Hello	0
+2015-01-01	World	0
+2015-01-01	Hello	0
+2015-01-01	['Hello','World']	[0,0]
+2015-01-01	['Hello2','World2']	[0,0]
+2015-01-01	Hello	0
+2015-01-01	Hello2	0
+2015-01-01	World	0
+2015-01-01	World2	0
+2015-01-01	Hello	0
+2015-01-01	Hello2	0
+2015-01-01	['Hello','World']	[0,0]
+2015-01-01	['Hello2','World2']	[0,0]
+2015-01-01	Hello	0
+2015-01-01	Hello2	0
+2015-01-01	World	0
+2015-01-01	World2	0
+2015-01-01	Hello	0
+2015-01-01	Hello2	0

@@ -0,0 +1,23 @@
+USE test;
+DROP TABLE IF EXISTS alter_00147;
+CREATE TABLE alter_00147 (d Date DEFAULT toDate('2015-01-01'), n Nested(x String)) ENGINE = CnchMergeTree
+PARTITION BY d ORDER BY d SETTINGS index_granularity = 8192;
+INSERT INTO alter_00147 (`n.x`) VALUES (['Hello', 'World']);
+SELECT * FROM alter_00147 order by n.x;
+SELECT * FROM alter_00147 ARRAY JOIN n order by n.x;
+SELECT * FROM alter_00147 ARRAY JOIN n WHERE n.x LIKE '%Hello%';
+ALTER TABLE alter_00147 ADD COLUMN n.y Array(UInt64);
+SELECT * FROM alter_00147 order by n.x;
+SELECT * FROM alter_00147 ARRAY JOIN n order by n.x;
+SELECT * FROM alter_00147 ARRAY JOIN n WHERE n.x LIKE '%Hello%';
+INSERT INTO alter_00147 (`n.x`) VALUES (['Hello2', 'World2']);
+SELECT * FROM alter_00147 ORDER BY n.x;
+SELECT * FROM alter_00147 ARRAY JOIN n ORDER BY n.x;
+SELECT * FROM alter_00147 ARRAY JOIN n WHERE n.x LIKE '%Hello%' ORDER BY n.x;
+SYSTEM START MERGES test.alter_00147;
+SELECT sleep(3) FORMAT Null;
+OPTIMIZE TABLE test.alter_00147;
+SELECT * FROM alter_00147 ORDER BY n.x;
+SELECT * FROM alter_00147 ARRAY JOIN n order by n.x;
+SELECT * FROM alter_00147 ARRAY JOIN n WHERE n.x LIKE '%Hello%' ORDER BY n.x;
+DROP TABLE alter_00147;
@@ -0,0 +1 @@
+[1]	123	124

@@ -0,0 +1 @@
+SELECT arrayMap(x -> 1, [2]), 123 AS x, x + 1;

@@ -0,0 +1 @@
+1	1	1

@@ -0,0 +1 @@
+SELECT identity(1 AS a) AS b, a, b;

@@ -0,0 +1,6 @@
+2	3000
+2	3000
+1	2000
+2	3000
+1	2000
+2	3000

@@ -0,0 +1,7 @@
+SET enable_shuffle_with_order = 1;
+SELECT a, b FROM (SELECT 1 AS a, 2000 AS b) ANY RIGHT JOIN (SELECT 2 AS a, 3000 AS b) USING a, b;
+SELECT a, b FROM (SELECT 1 AS a, 2000 AS b) ANY RIGHT JOIN (SELECT 2 AS a, 3000 AS b) USING b, a;
+
+SELECT a, b FROM (SELECT 1 AS a, 2000 AS b) ANY RIGHT JOIN (SELECT 2 AS a, 3000 AS b UNION ALL SELECT 1 AS a, 2000 AS b) USING a, b order by a;
+SELECT a, b FROM (SELECT 1 AS a, 2000 AS b) ANY RIGHT JOIN (SELECT 2 AS a, 3000 AS b UNION ALL SELECT 1 AS a, 2000 AS b) USING b, a order by a;
+

@@ -0,0 +1,3 @@
+\N
+2
+42	1	1	\N	\N

@@ -0,0 +1,9 @@
+select b from (select 1 as a, 42 as c) any left join (select 2 as b, 2 as b, 41 as c) using c;
+select b from (select 1 as a, 42 as c) any left join (select 2 as b, 2 as b, 42 as c) using c;
+
+select c,a,a,b,b from
+(select 1 as a, 1 as a, 42 as c group by c order by a,c)
+any left join
+(select 2 as b, 2 as b, 41 as c group by c order by b,c)
+using c
+order by b;
@@ -0,0 +1,14 @@
+1	1
+1	1
+1	0
+1	1	1	1	0	1
+0
+0
+1
+0
+0
+1
+0
+0
+0
+0

@@ -0,0 +1,13 @@
+select 1 as x, x = 1 or x = 2 or x = 3 or x = -1;
+select 1 as x, x = 1.0 or x = 2 or x = 3 or x = -1;
+select 1 as x, x = 1.5 or x = 2 or x = 3 or x = -1;
+
+SELECT
+    1 IN (1, -1, 2.0, 2.5),
+    1.0 IN (1, -1, 2.0, 2.5),
+    1 IN (1.0, -1, 2.0, 2.5),
+    1.0 IN (1.0, -1, 2.0, 2.5),
+    1 IN (1.1, -1, 2.0, 2.5),
+    -1 IN (1, -1, 2.0, 2.5);
+
+SELECT -number IN (1, 2, 3, -5.0, -2.0) FROM system.numbers LIMIT 10;

@@ -0,0 +1,2 @@
+1
+3

@@ -0,0 +1,2 @@
+SELECT 1 x FROM system.one;
+SELECT 1 + (2 AS x) y FROM system.one;
@@ -0,0 +1,15 @@
+2000-01-01	1	['']	['Hello']	[0]
+2000-01-01	1	['']	['Hello']	[0]
+2000-01-01	2	['']	['World']	[0]
+2000-01-01	1	['']	['Hello']	[0]
+2000-01-01	2	['']	['World']	[0]
+2000-01-01	1	['']	[1]	[0]
+2000-01-01	2	['']	[2]	[0]
+Array(Enum16(\'Hello\' = 1, \'World\' = 2, \'a\' = 300))	Array(String)	Array(Enum16(\'Hello\' = 1, \'World\' = 2, \'a\' = 300))
+['Hello','World']	['Hello','World']	['Hello','World']
+['Hello','World']	['Hello','a']	['World','a']
+2000-01-01	1	['system','rtb.client']	['hello','world']
+2000-01-01	1	['system','rtb.client']	['hello','world']
+2000-01-01	2	['http.status','http.code']	['hello','goodbye']
+2000-01-01	1	['']	['Hello.world']	[0]
+2000-01-01	1	['']	['Hello.world']	[0]

@@ -0,0 +1,67 @@
+USE test;
+DROP TABLE IF EXISTS test.enum_nested_alter;
+CREATE TABLE test.enum_nested_alter(d Date DEFAULT '2000-01-01', x UInt64, n Nested(a String, e Enum8('Hello' = 1), b UInt8)) ENGINE = CnchMergeTree() PARTITION BY toYYYYMM(d) ORDER BY x SETTINGS index_granularity=1;
+
+INSERT INTO test.enum_nested_alter (x, n.e) VALUES (1, ['Hello']);
+SELECT * FROM test.enum_nested_alter;
+
+ALTER TABLE test.enum_nested_alter MODIFY COLUMN n.e Array(Enum8('Hello' = 1, 'World' = 2));
+INSERT INTO test.enum_nested_alter (x, n.e) VALUES (2, ['World']);
+SELECT * FROM test.enum_nested_alter ORDER BY x;
+
+ALTER TABLE test.enum_nested_alter MODIFY COLUMN n.e Array(Enum16('Hello' = 1, 'World' = 2, 'a' = 300));
+SELECT * FROM test.enum_nested_alter ORDER BY x;
+
+ALTER TABLE test.enum_nested_alter MODIFY COLUMN n.e Array(Int16);
+SELECT * FROM test.enum_nested_alter ORDER BY x;
+
+DROP TABLE test.enum_nested_alter;
+CREATE TABLE test.enum_nested_alter(n Nested(a Int16, b Enum16('Hello' = 1, 'World' = 2, 'a' = 300), c String)) Engine = CnchMergeTree() ORDER BY tuple() SETTINGS index_granularity = 1;
+INSERT INTO test.enum_nested_alter VALUES ([1, 2], ['Hello', 'World'], ['Hello', 'World']), ([1, 2], ['Hello', 'a'], ['World', 'a']);
+
+ALTER TABLE test.enum_nested_alter MODIFY COLUMN n.a Array(Enum16('Hello' = 1, 'World' = 2, 'a' = 300));
+ALTER TABLE test.enum_nested_alter MODIFY COLUMN n.b Array(String);
+ALTER TABLE test.enum_nested_alter MODIFY COLUMN n.c Array(Enum16('Hello' = 1, 'World' = 2, 'a' = 300));
+
+SELECT toTypeName(n.a), toTypeName(n.b), toTypeName(n.c) FROM test.enum_nested_alter LIMIT 1;
+SELECT * FROM test.enum_nested_alter;
+
+DROP TABLE test.enum_nested_alter;
+
+
+CREATE TABLE test.enum_nested_alter
+(
+    d Date DEFAULT '2000-01-01',
+    x UInt64,
+    tasks Nested(
+        errcategory Enum8(
+            'undefined' = 0, 'system' = 1, 'generic' = 2, 'asio.netdb' = 3, 'asio.misc' = 4,
+            'asio.addrinfo' = 5, 'rtb.client' = 6, 'rtb.logic' = 7, 'http.status' = 8),
+        status Enum16('hello' = 1, 'world' = 2)))
+ENGINE = CnchMergeTree() PARTITION BY toYYYYMM(d) ORDER BY x SETTINGS index_granularity=1;
+
+INSERT INTO test.enum_nested_alter (x, tasks.errcategory, tasks.status) VALUES (1, ['system', 'rtb.client'], ['hello', 'world']);
+SELECT * FROM test.enum_nested_alter ORDER BY x;
+
+ALTER TABLE test.enum_nested_alter
+    MODIFY COLUMN tasks.errcategory Array(Enum8(
+        'undefined' = 0, 'system' = 1, 'generic' = 2, 'asio.netdb' = 3, 'asio.misc' = 4,
+        'asio.addrinfo' = 5, 'rtb.client' = 6, 'rtb.logic' = 7, 'http.status' = 8, 'http.code' = 9)),
+    MODIFY COLUMN tasks.status Array(Enum8('hello' = 1, 'world' = 2, 'goodbye' = 3));
+
+INSERT INTO test.enum_nested_alter (x, tasks.errcategory, tasks.status) VALUES (2, ['http.status', 'http.code'], ['hello', 'goodbye']);
+SELECT * FROM test.enum_nested_alter ORDER BY x;
+
+DROP TABLE test.enum_nested_alter;
+
+
+DROP TABLE IF EXISTS test.enum_nested_alter;
+CREATE TABLE test.enum_nested_alter(d Date DEFAULT '2000-01-01', x UInt64, n Nested(a String, e Enum8('Hello.world' = 1), b UInt8)) ENGINE = CnchMergeTree() PARTITION BY toYYYYMM(d) ORDER BY x SETTINGS index_granularity = 1;
+
+INSERT INTO test.enum_nested_alter (x, n.e) VALUES (1, ['Hello.world']);
+SELECT * FROM test.enum_nested_alter;
+
+ALTER TABLE test.enum_nested_alter MODIFY COLUMN n.e Array(Enum8('Hello.world' = 1, 'a' = 2));
+SELECT * FROM test.enum_nested_alter;
+
+DROP TABLE test.enum_nested_alter;

@@ -0,0 +1 @@
+1	1

@@ -0,0 +1,7 @@
+DROP TABLE IF EXISTS test.merge_tree;
+CREATE TABLE test.merge_tree (x UInt64, date Date) ENGINE = CnchMergeTree() PARTITION BY toYYYYMM(date) ORDER BY x SETTINGS index_granularity=1;
+
+INSERT INTO test.merge_tree VALUES (1, '2000-01-01');
+SELECT x AS y, y FROM test.merge_tree;
+
+DROP TABLE IF EXISTS test.merge_tree;
Some files were not shown because too many files have changed in this diff.