Search code examples
sql · relational-algebra · apache-calcite · sql-parser

Converting SQL Query with Aggregate Function to Relational Algebra Expression in Apache Calcite - No match found for function signature


I'm trying to convert a SQL query to a relational algebra expression using the Apache Calcite SqlToRelConverter.

It works fine for this query (quotes are for ensuring lowercase):

queryToRelationalAlgebraRoot("SELECT \"country\" FROM \"mytable\"")

But on this query it fails:

queryToRelationalAlgebraRoot("SELECT \"country\", SUM(\"salary\") FROM \"mytable\" GROUP BY \"country\"")

with this error:

org.apache.calcite.sql.validate.SqlValidatorException: No match found for function signature SUM(<NUMERIC>)

It seems that somehow the SQL validator doesn't have aggregation functions like sum or count registered.

// Sample row type for the test data: one income record per person.
case class Income(id: Int, salary: Double, country: String)

/** Exposes a Spark DataFrame to Calcite as a table by translating its schema
  * into a Calcite row type. Only String, Integer and Double columns are
  * supported; any other Spark type fails fast with a descriptive error.
  */
class SparkDataFrameTable(df: DataFrame) extends AbstractTable {

  /** Builds the Calcite struct (row) type from the DataFrame's schema.
    *
    * @param typeFactory factory supplied by Calcite to create SQL types
    * @return a struct type with one field per DataFrame column
    */
  def getRowType(typeFactory: RelDataTypeFactory): RelDataType = {
    val typeList = df.schema.fields.map { field =>
      field.dataType match {
        case _: StringType  => typeFactory.createSqlType(SqlTypeName.VARCHAR)
        case _: IntegerType => typeFactory.createSqlType(SqlTypeName.INTEGER)
        case _: DoubleType  => typeFactory.createSqlType(SqlTypeName.DOUBLE)
        // The original match was non-exhaustive: any other Spark type crashed
        // with an opaque MatchError. Fail fast with a clear message instead.
        case other =>
          throw new IllegalArgumentException(
            s"Unsupported Spark data type for field '${field.name}': $other")
      }
    }.toList.asJava
    val fieldNameList = df.schema.fieldNames.toList.asJava
    typeFactory.createStructType(typeList, fieldNameList)
  }

}

object RelationalAlgebra {

  /** Parses `query`, validates it against a schema containing a single table
    * "mytable" (backed by a small sample Spark DataFrame), and converts the
    * parse tree into a Calcite relational-algebra expression.
    *
    * @param query SQL text; quote identifiers to keep them lowercase
    * @return the root of the converted relational expression tree
    */
  def queryToRelationalAlgebraRoot(query: String): RelRoot = {
    val sqlParser = SqlParser.create(query)
    val sqlParseTree = sqlParser.parseQuery()

    val frameworkConfig = Frameworks.newConfigBuilder().build()
    val planner = new PlannerImpl(frameworkConfig)

    val rootSchema = CalciteSchema.createRootSchema(true, true)

    // Some sample data for testing.
    val spark = SparkSession.builder().master("local").getOrCreate()
    import spark.implicits._
    val df = Seq(
      Income(1, 100000, "USA"),
      Income(2, 110000, "USA"),
      Income(3, 80000, "Canada")
    ).toDF()
    rootSchema.add("mytable", new SparkDataFrameTable(df))

    val defaultSchema = List.empty[String].asJava
    val connectionConfig = new CalciteConnectionConfigImpl(new Properties())
    val typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT)
    val catalogReader =
      new CalciteCatalogReader(rootSchema, defaultSchema, typeFactory, connectionConfig)

    // FIX: use the initialized singleton operator table. `new SqlStdOperatorTable()`
    // yields an *uninitialized* table, so the validator cannot resolve built-in
    // aggregate functions and fails with
    // "No match found for function signature SUM(<NUMERIC>)".
    val validator = SqlValidatorUtil.newValidator(
      SqlStdOperatorTable.instance(),
      catalogReader,
      typeFactory,
      SqlConformanceEnum.LENIENT)

    val cluster = RelOptCluster.create(new VolcanoPlanner(), new RexBuilder(typeFactory))

    val sqlToRelConfig = SqlToRelConverter.configBuilder().build()

    val sqlToRelConverter = new SqlToRelConverter(
      planner,
      validator,
      catalogReader,
      cluster,
      StandardConvertletTable.INSTANCE,
      sqlToRelConfig)

    sqlToRelConverter.convertQuery(sqlParseTree, true, true)
  }

}

Solution

  • The problem with the code is that new SqlStdOperatorTable() creates an operator table that has not been initialized, so the validator built from it does not know about the standard operators and functions (SUM, COUNT, etc.). The correct way to obtain the standard operator table is to call SqlStdOperatorTable.instance(), which returns the fully initialized singleton.

    I found the solution after emailing the Apache Calcite developer mailing list (dev@calcite.apache.org). I would like to thank Yuzhao Chen for looking into my question and pointing out the problem with my code.