spark VectorizedValuesReader 源码

  • 2022-10-20
  • 浏览 (240)

spark VectorizedValuesReader 代码

文件路径:/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedValuesReader.java

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution.datasources.parquet;

import java.nio.ByteBuffer;

import org.apache.spark.sql.execution.vectorized.WritableColumnVector;

import org.apache.parquet.io.api.Binary;

/**
 * Interface for value decoding that supports vectorized (aka batched) decoding.
 * TODO: merge this into parquet-mr.
 */
public interface VectorizedValuesReader {

  // ---- Single-value readers -------------------------------------------------
  // Each call decodes and returns exactly one value from the underlying page.

  boolean readBoolean();
  byte readByte();
  short readShort();
  int readInteger();
  long readLong();
  float readFloat();
  double readDouble();

  /**
   * Reads a single binary value of exactly {@code len} bytes.
   *
   * @param len number of bytes to read
   * @return the decoded binary value
   */
  Binary readBinary(int len);

  /**
   * Batched readers: each reads {@code total} values into {@code c},
   * starting at {@code c[rowId]}.
   */
  void readBooleans(int total, WritableColumnVector c, int rowId);
  void readBytes(int total, WritableColumnVector c, int rowId);
  void readShorts(int total, WritableColumnVector c, int rowId);
  void readIntegers(int total, WritableColumnVector c, int rowId);
  // "WithRebase" variants rebase date/timestamp values between calendars;
  // failIfRebase controls whether an ambiguous rebase raises instead.
  void readIntegersWithRebase(int total, WritableColumnVector c, int rowId, boolean failIfRebase);
  void readUnsignedIntegers(int total, WritableColumnVector c, int rowId);
  void readUnsignedLongs(int total, WritableColumnVector c, int rowId);
  void readLongs(int total, WritableColumnVector c, int rowId);
  void readLongsWithRebase(
      int total,
      WritableColumnVector c,
      int rowId,
      boolean failIfRebase,
      String timeZone);
  void readFloats(int total, WritableColumnVector c, int rowId);
  void readDoubles(int total, WritableColumnVector c, int rowId);
  void readBinary(int total, WritableColumnVector c, int rowId);

  /**
   * Skip readers: each advances past {@code total} values without
   * materializing them.
   */
  void skipBooleans(int total);
  void skipBytes(int total);
  void skipShorts(int total);
  void skipIntegers(int total);
  void skipLongs(int total);
  void skipFloats(int total);
  void skipDoubles(int total);
  void skipBinary(int total);
  void skipFixedLenByteArray(int total, int len);

  /**
   * A functional interface to write integer values to columnar output.
   */
  @FunctionalInterface
  interface IntegerOutputWriter {

    /**
     * Writes a long value to a specified row in an output column vector.
     * The value is carried as {@code long} even for integer columns so that
     * unsigned 32-bit values can be represented without overflow.
     *
     * @param outputColumnVector the vector to write to
     * @param rowId the row to write to
     * @param val value to write
     */
    void write(WritableColumnVector outputColumnVector, int rowId, long val);
  }

  /**
   * A functional interface to write a {@link ByteBuffer} slice to columnar
   * output.
   */
  @FunctionalInterface
  interface ByteBufferOutputWriter {

    /**
     * Writes {@code length} bytes of {@code val} to row {@code rowId} of
     * column vector {@code c}.
     */
    void write(WritableColumnVector c, int rowId, ByteBuffer val, int length);

    /**
     * Default implementation for heap-backed buffers: copies from the
     * buffer's backing array at its current position.
     * NOTE(review): assumes {@code val} is array-backed ({@code hasArray()});
     * a direct buffer would throw {@code UnsupportedOperationException}.
     */
    static void writeArrayByteBuffer(WritableColumnVector c, int rowId, ByteBuffer val,
        int length) {
      c.putByteArray(rowId,
          val.array(),
          val.arrayOffset() + val.position(),
          length);
    }

    /** No-op writer, used when decoded values should be discarded (skip paths). */
    static void skipWrite(WritableColumnVector c, int rowId, ByteBuffer val, int length) { }
  }
}

相关信息

spark 源码目录

相关文章

spark ParquetColumnVector 源码

spark ParquetDictionary 源码

spark ParquetFooterReader 源码

spark ParquetReadState 源码

spark ParquetVectorUpdater 源码

spark ParquetVectorUpdaterFactory 源码

spark SpecificParquetRecordReaderBase 源码

spark VectorizedColumnReader 源码

spark VectorizedDeltaBinaryPackedReader 源码

spark VectorizedDeltaByteArrayReader 源码

0  赞