@@ -7,6 +7,7 @@
 #include <DataStreams/FilterBlockInputStream.h>
 #include <DataStreams/LimitBlockInputStream.h>
 #include <DataStreams/MergeSortingBlockInputStream.h>
+#include <DataStreams/NullBlockInputStream.h>
 #include <DataStreams/ParallelAggregatingBlockInputStream.h>
 #include <DataStreams/PartialSortingBlockInputStream.h>
 #include <DataStreams/UnionBlockInputStream.h>
@@ -16,6 +17,7 @@
 #include <Parsers/ASTSelectQuery.h>
 #include <Storages/RegionQueryInfo.h>
 #include <Storages/StorageMergeTree.h>
+#include <Storages/Transaction/KVStore.h>
 #include <Storages/Transaction/Region.h>
 #include <Storages/Transaction/RegionException.h>
 #include <Storages/Transaction/SchemaSyncer.h>
@@ -136,7 +138,7 @@ void InterpreterDAG::executeTS(const tipb::TableScan & ts, Pipeline & pipeline)
     info.region_id = dag.getRegionID();
     info.version = dag.getRegionVersion();
     info.conf_version = dag.getRegionConfVersion();
-    auto current_region = context.getTMTContext().getRegionTable().getRegionByTableAndID(table_id, info.region_id);
+    auto current_region = context.getTMTContext().getKVStore()->getRegion(info.region_id);
     if (!current_region)
     {
         std::vector<RegionID> region_ids;
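Note on the lookup change above: the region is now fetched from the KVStore by region id alone, rather than from the RegionTable keyed by (table_id, region_id). A minimal sketch of the intended control flow, assuming getRegion() returns a null region pointer when the region is not present on this store; the RegionException constructor shown is an illustrative assumption, not taken from this diff:

    // Hypothetical sketch; only getKVStore()/getRegion() appear in the diff.
    auto current_region = context.getTMTContext().getKVStore()->getRegion(info.region_id);
    if (!current_region)
    {
        // Region missing locally: collect the failed region id so the client
        // can refresh its region cache and retry.
        std::vector<RegionID> region_ids;
        region_ids.push_back(info.region_id);
        throw RegionException(std::move(region_ids)); // assumed constructor shape
    }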
@@ -148,6 +150,11 @@ void InterpreterDAG::executeTS(const tipb::TableScan & ts, Pipeline & pipeline)
     query_info.mvcc_query_info->concurrent = 0.0;
     pipeline.streams = storage->read(required_columns, query_info, context, from_stage, max_block_size, max_streams);
 
+    if (pipeline.streams.empty())
+    {
+        pipeline.streams.emplace_back(std::make_shared<NullBlockInputStream>(storage->getSampleBlockForColumns(required_columns)));
+    }
+
     pipeline.transform([&](auto & stream) { stream->addTableLock(table_lock); });
 
     /// Set the limits and quota for reading data, the speed and time of the query.
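The empty-stream fallback above guards later code that assumes at least one stream exists (for example, pipeline.firstStream()->getHeader()). A NullBlockInputStream produces no rows but still carries the table's header, so schema-dependent stages keep working on an empty read. A behavior sketch, assuming ClickHouse-style IBlockInputStream semantics, where read() returns an empty Block at end of stream and blocksHaveEqualStructure() compares headers:

    #include <cassert>
    #include <Core/Block.h>
    #include <DataStreams/NullBlockInputStream.h>

    // Build the fallback stream exactly as the hunk does; `storage` and
    // `required_columns` come from the surrounding executeTS() context.
    Block header = storage->getSampleBlockForColumns(required_columns);
    auto null_stream = std::make_shared<NullBlockInputStream>(header);

    // The stream preserves the schema but yields no data:
    assert(blocksHaveEqualStructure(null_stream->getHeader(), header));
    assert(!null_stream->read()); // first read() is already an empty block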
@@ -178,7 +185,6 @@ void InterpreterDAG::executeTS(const tipb::TableScan & ts, Pipeline & pipeline)
             }
         });
     }
-    ColumnsWithTypeAndName columnsWithTypeAndName = pipeline.firstStream()->getHeader().getColumnsWithTypeAndName();
 }
 
 InterpreterDAG::AnalysisResult InterpreterDAG::analyzeExpressions()