Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,7 @@
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

public class TransformOperator implements ProcessOperator {

Expand Down Expand Up @@ -223,8 +224,9 @@

@SuppressWarnings("squid:S112")
@Override
public TsBlock next() throws Exception {

Check failure on line 227 in iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/operator/process/TransformOperator.java

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

Refactor this method to reduce its Cognitive Complexity from 16 to the 15 allowed.

See more on https://sonarcloud.io/project/issues?id=apache_iotdb&issues=AZ0-P1wFnyOtrsjoghy4&open=AZ0-P1wFnyOtrsjoghy4&pullRequest=17395

long start = System.nanoTime();
try {
YieldableState yieldableState = iterateAllColumnsToNextValid();
if (yieldableState == YieldableState.NOT_YIELDABLE_WAITING_FOR_DATA) {
Expand All @@ -236,9 +238,10 @@
final TimeColumnBuilder timeBuilder = tsBlockBuilder.getTimeColumnBuilder();
final ColumnBuilder[] columnBuilders = tsBlockBuilder.getValueColumnBuilders();
final int columnCount = columnBuilders.length;

int rowCount = 0;
while (!timeHeap.isEmpty()) {
long maxRuntime = operatorContext.getMaxRunTime().roundTo(TimeUnit.NANOSECONDS);

Check warning on line 241 in iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/operator/process/TransformOperator.java

View check run for this annotation

SonarQubeCloud / SonarCloud Code Analysis

Change this instance-reference to a static reference.

See more on https://sonarcloud.io/project/issues?id=apache_iotdb&issues=AZ0-P1wFnyOtrsjoghy5&open=AZ0-P1wFnyOtrsjoghy5&pullRequest=17395
while (!timeHeap.isEmpty()
&& !tsBlockBuilder.isFull()
&& System.nanoTime() - start < maxRuntime) {
final long currentTime = timeHeap.pollFirst();

// time
Expand All @@ -253,25 +256,26 @@
}
timeHeap.add(currentTime);

tsBlockBuilder.declarePositions(rowCount);
return tsBlockBuilder.build();
}
}

prepareEachColumn(columnCount);

++rowCount;
tsBlockBuilder.declarePosition();

yieldableState = iterateAllColumnsToNextValid();
if (yieldableState == YieldableState.NOT_YIELDABLE_WAITING_FOR_DATA) {
tsBlockBuilder.declarePositions(rowCount);
return tsBlockBuilder.build();
}

inputLayer.updateRowRecordListEvictionUpperBound();
}

tsBlockBuilder.declarePositions(rowCount);
if (tsBlockBuilder.isEmpty()) {
return null;
}

return tsBlockBuilder.build();
} catch (Exception e) {
throw new RuntimeException(e);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,26 +20,39 @@
package org.apache.iotdb.db.queryengine.execution.operator;

import org.apache.iotdb.commons.concurrent.IoTDBThreadPoolFactory;
import org.apache.iotdb.commons.path.PartialPath;
import org.apache.iotdb.commons.udf.service.UDFClassLoaderManager;
import org.apache.iotdb.db.queryengine.common.FragmentInstanceId;
import org.apache.iotdb.db.queryengine.common.NodeRef;
import org.apache.iotdb.db.queryengine.common.PlanFragmentId;
import org.apache.iotdb.db.queryengine.common.QueryId;
import org.apache.iotdb.db.queryengine.execution.driver.DriverContext;
import org.apache.iotdb.db.queryengine.execution.fragment.FragmentInstanceContext;
import org.apache.iotdb.db.queryengine.execution.fragment.FragmentInstanceStateMachine;
import org.apache.iotdb.db.queryengine.execution.operator.process.TransformOperator;
import org.apache.iotdb.db.queryengine.execution.operator.source.SeriesScanOperator;
import org.apache.iotdb.db.queryengine.plan.expression.Expression;
import org.apache.iotdb.db.queryengine.plan.expression.leaf.TimeSeriesOperand;
import org.apache.iotdb.db.queryengine.plan.planner.plan.node.PlanNodeId;
import org.apache.iotdb.db.queryengine.plan.planner.plan.parameter.InputLocation;
import org.apache.iotdb.db.queryengine.transformation.api.LayerReader;
import org.apache.iotdb.db.queryengine.transformation.dag.input.QueryDataSetInputLayer;
import org.apache.iotdb.db.queryengine.transformation.dag.input.TsBlockInputDataSet;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.apache.tsfile.common.conf.TSFileDescriptor;
import org.apache.tsfile.enums.TSDataType;
import org.apache.tsfile.read.common.block.TsBlock;
import org.apache.tsfile.read.common.block.column.LongColumn;
import org.apache.tsfile.read.common.block.column.TimeColumn;
import org.junit.Assert;
import org.junit.Test;

import java.time.ZoneId;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import static org.apache.iotdb.db.queryengine.execution.fragment.FragmentInstanceContext.createFragmentInstanceContext;
Expand Down Expand Up @@ -139,4 +152,127 @@ public long ramBytesUsed() {
reader.yield();
reader.consumedAll();
}

@Test
public void testTransformResultLimit() throws Exception {
  // Verifies that TransformOperator chunks its output: a single 2001-row input
  // TsBlock must be emitted as multiple result blocks, each no larger than the
  // configured max TsBlock line number, with no rows lost overall.
  UDFClassLoaderManager.setupAndGetInstance();
  // Save the global config so other tests are not affected by our limit change.
  int savedMaxLine = TSFileDescriptor.getInstance().getConfig().getMaxTsBlockLineNumber();
  try {
    final int rowCount = 2001;
    final int maxLine = 200;
    // Use the maxLine variable (not a duplicated literal) so the configured limit
    // and the assertions below cannot silently drift apart.
    TSFileDescriptor.getInstance().getConfig().setMaxTsBlockLineNumber(maxLine);

    // Minimal fragment-instance plumbing required to build operator contexts.
    QueryId queryId = new QueryId("stub_query_chunk");
    FragmentInstanceId instanceId =
        new FragmentInstanceId(new PlanFragmentId(queryId, 0), "stub-instance");
    FragmentInstanceStateMachine stateMachine =
        new FragmentInstanceStateMachine(
            instanceId,
            IoTDBThreadPoolFactory.newFixedThreadPool(1, "test-instance-notification"));
    FragmentInstanceContext fragmentInstanceContext =
        createFragmentInstanceContext(instanceId, stateMachine);
    DriverContext driverContext = new DriverContext(fragmentInstanceContext, 0);
    PlanNodeId scanNodeId = new PlanNodeId("scan");
    driverContext.addOperatorContext(1, scanNodeId, SeriesScanOperator.class.getSimpleName());
    PlanNodeId transformNodeId = new PlanNodeId("transform");
    driverContext.addOperatorContext(2, transformNodeId, TransformOperator.class.getSimpleName());

    // One oversized batch: rowCount rows delivered by the child in a single TsBlock.
    long[] times = new long[rowCount];
    long[] values = new long[rowCount];
    for (int i = 0; i < rowCount; i++) {
      times[i] = i;
      values[i] = i * 10L;
    }
    TsBlock oneBatch =
        new TsBlock(
            new TimeColumn(rowCount, times), new LongColumn(rowCount, Optional.empty(), values));

    // Stub child operator that hands out the single batch exactly once.
    Operator childOperator =
        new Operator() {
          boolean consumed = false;

          @Override
          public OperatorContext getOperatorContext() {
            return driverContext.getOperatorContexts().get(0);
          }

          @Override
          public TsBlock next() {
            if (!consumed) {
              consumed = true;
              return oneBatch;
            }
            return null;
          }

          @Override
          public boolean hasNext() {
            return !consumed;
          }

          @Override
          public void close() {}

          @Override
          public boolean isFinished() {
            return consumed;
          }

          @Override
          public long calculateMaxPeekMemory() {
            return oneBatch.getSizeInBytes();
          }

          @Override
          public long calculateMaxReturnSize() {
            return oneBatch.getSizeInBytes();
          }

          @Override
          public long calculateRetainedSizeAfterCallingNext() {
            return 0;
          }

          @Override
          public long ramBytesUsed() {
            return 0;
          }
        };

    // Identity transform over a single INT64 series: output rows == input rows.
    TimeSeriesOperand s1 =
        new TimeSeriesOperand(new PartialPath("root.sg.d1.s1"), TSDataType.INT64);
    Map<String, List<InputLocation>> inputLocations =
        ImmutableMap.of(s1.getExpressionString(), ImmutableList.of(new InputLocation(0, 0)));
    Map<NodeRef<Expression>, TSDataType> expressionTypes = new HashMap<>();
    expressionTypes.put(NodeRef.of(s1), TSDataType.INT64);

    TransformOperator transform =
        new TransformOperator(
            driverContext.getOperatorContexts().get(1),
            childOperator,
            ImmutableList.of(TSDataType.INT64),
            inputLocations,
            new Expression[] {s1},
            true,
            ZoneId.systemDefault(),
            expressionTypes,
            true);

    // Drain the operator; next() may legitimately return null between batches.
    int totalOutRows = 0;
    int nonNullNextCount = 0;
    while (transform.hasNext()) {
      TsBlock out = transform.next();
      if (out != null) {
        nonNullNextCount++;
        Assert.assertTrue(
            "Each batch must be at most " + maxLine + " rows", out.getPositionCount() <= maxLine);
        totalOutRows += out.getPositionCount();
      }
    }
    Assert.assertEquals(rowCount, totalOutRows);
    // At <= maxLine rows per batch, at least ceil(rowCount / maxLine) non-null
    // batches are required (2001 rows / 200 per batch -> at least 11 batches).
    int minBatches = (rowCount + maxLine - 1) / maxLine;
    Assert.assertTrue(
        "Expected at least " + minBatches + " non-null batches, got " + nonNullNextCount,
        nonNullNextCount >= minBatches);
  } finally {
    // Always restore the global config, even if an assertion above fails.
    TSFileDescriptor.getInstance().getConfig().setMaxTsBlockLineNumber(savedMaxLine);
  }
}
}
Loading