// Apache Spark source file:
// /sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ExpectsInputTypes.scala
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
import org.apache.spark.sql.errors.QueryErrorsBase
import org.apache.spark.sql.types.AbstractDataType
/**
 * A trait mixed into an expression to declare the input types it expects from its children.
 *
 * Operator expressions (e.g. [[Add]], [[Subtract]]) typically use this trait directly, so the
 * declared input types are enforced without any implicit casting.
 *
 * Most function expressions (e.g. [[Substring]]) should instead extend
 * [[ImplicitCastInputTypes]], which additionally allows the analyzer to insert implicit casts.
 */
trait ExpectsInputTypes extends Expression {
  /**
   * The expected input types, one entry per child expression: the i-th element of the returned
   * sequence is the type requirement for the i-th child.
   *
   * Each element may be either:
   * 1. a specific data type, e.g. LongType, StringType.
   * 2. a non-leaf abstract data type, e.g. NumericType, IntegralType, FractionalType.
   */
  def inputTypes: Seq[AbstractDataType]

  // Delegate the actual check to the companion object so the logic is shared.
  override def checkInputDataTypes(): TypeCheckResult =
    ExpectsInputTypes.checkInputDataTypes(children, inputTypes)
}
object ExpectsInputTypes extends QueryErrorsBase {

  /**
   * Validates each child expression's data type against the corresponding expected type.
   *
   * @param inputs     the child expressions to validate.
   * @param inputTypes the expected type for each child, positionally aligned with `inputs`
   *                   (extra elements on either side are ignored, as the two are zipped).
   * @return [[TypeCheckResult.TypeCheckSuccess]] when every child is accepted, otherwise a
   *         [[DataTypeMismatch]] describing the first offending child (1-based `paramIndex`).
   */
  def checkInputDataTypes(
      inputs: Seq[Expression],
      inputTypes: Seq[AbstractDataType]): TypeCheckResult = {
    val paired = inputs.zip(inputTypes)
    val badIdx = paired.indexWhere { case (child, expected) =>
      !expected.acceptsType(child.dataType)
    }
    if (badIdx < 0) {
      TypeCheckResult.TypeCheckSuccess
    } else {
      val (child, expected) = paired(badIdx)
      DataTypeMismatch(
        errorSubClass = "UNEXPECTED_INPUT_TYPE",
        messageParameters = Map(
          // paramIndex is 1-based in the user-facing error message.
          "paramIndex" -> (badIdx + 1).toString,
          "requiredType" -> toSQLType(expected),
          "inputSql" -> toSQLExpr(child),
          "inputType" -> toSQLType(child.dataType)))
    }
  }
}
/**
 * A marker mixin telling the analyzer to perform implicit type casting on this expression's
 * inputs, using [[org.apache.spark.sql.catalyst.analysis.TypeCoercion.ImplicitTypeCasts]].
 *
 * Extends [[ExpectsInputTypes]] and adds no members of its own: the declared `inputTypes` are
 * still enforced, but the analyzer may first insert casts to satisfy them.
 */
trait ImplicitCastInputTypes extends ExpectsInputTypes {
  // No other methods
}