Skip to content

Commit c1dbd38

Browse files
committed
1 parent 7bf5923 · commit c1dbd38

4 files changed

Lines changed: 8 additions & 5 deletions

File tree

paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkInternalRowWrapper.java

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -52,15 +52,18 @@
5252
import java.util.HashMap;
5353
import java.util.Map;
5454

55-
/** Wrapper to fetch value from the spark internal row. */
55+
/**
56+
* An {@link InternalRow} wraps spark {@link org.apache.spark.sql.catalyst.InternalRow} for v2
57+
* write.
58+
*/
5659
public class SparkInternalRowWrapper implements InternalRow, Serializable {
5760

5861
private final StructType tableSchema;
5962
private final int length;
6063
private final boolean blobAsDescriptor;
6164
@Nullable private final UriReaderFactory uriReaderFactory;
65+
@Nullable private final int[] fieldIndexMap;
6266

63-
@Nullable private int[] fieldIndexMap = null;
6467
private transient org.apache.spark.sql.catalyst.InternalRow internalRow;
6568

6669
public SparkInternalRowWrapper(StructType tableSchema, int length) {

paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/SparkRow.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@
5555

5656
import scala.collection.JavaConverters;
5757

58-
/** A {@link InternalRow} wraps spark {@link Row}. */
58+
/** An {@link InternalRow} wraps spark {@link Row} for v1 write. */
5959
public class SparkRow implements InternalRow, Serializable {
6060

6161
private final RowType type;

paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/catalog/functions/PaimonFunctions.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ import org.apache.paimon.shade.guava30.com.google.common.collect.{ImmutableMap,
2525
import org.apache.paimon.spark.SparkInternalRowWrapper
2626
import org.apache.paimon.spark.SparkTypeUtils.toPaimonRowType
2727
import org.apache.paimon.spark.catalog.functions.PaimonFunctions._
28-
import org.apache.paimon.spark.function.{DescriptorToStringFunction, DescriptorToStringUnbound, PathToDescriptorFunction, PathToDescriptorUnbound}
28+
import org.apache.paimon.spark.function.{DescriptorToStringUnbound, PathToDescriptorUnbound}
2929
import org.apache.paimon.table.{BucketMode, FileStoreTable}
3030
import org.apache.paimon.types.{ArrayType, DataType => PaimonDataType, LocalZonedTimestampType, MapType, RowType, TimestampType}
3131
import org.apache.paimon.utils.ProjectedRow

paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/BlobTestBase.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -220,7 +220,7 @@ class BlobTestBase extends PaimonSparkTestBase {
220220

221221
def bytesToHex(bytes: Array[Byte]): String = {
222222
val hexChars = new Array[Char](bytes.length * 2)
223-
for (j <- 0 until bytes.length) {
223+
for (j <- bytes.indices) {
224224
val v = bytes(j) & 0xff
225225
hexChars(j * 2) = HEX_ARRAY(v >>> 4)
226226
hexChars(j * 2 + 1) = HEX_ARRAY(v & 0x0f)

0 commit comments

Comments (0)