Tags: scala, apache-spark, apache-spark-sql

How can I get all names of the arrays on Dataframe


How can I get the names of all array-type columns in a DataFrame?

The problem is that I am trying to explode ALL of the arrays.

import org.apache.spark.sql.{Column, DataFrame, SparkSession}
import org.apache.spark.sql.functions.{col, explode}
import org.apache.spark.sql.types.{ArrayType, StructField, StructType}

val df = df.select(explode(col("pr")).as("collection")).select(col("collection.*"))
 def flattenSchema(schema: StructType, prefix: String = null) : Array[Column] = {
    schema.fields.flatMap(f => {
      val colName = if (prefix == null) f.name else (prefix + "." + f.name)

      f.dataType match {
        case st: StructType => flattenSchema(st, colName)
        case _ => Array(col(colName).alias(colName))
      }
    })
  }
  
  val newDF=prdf.select(flattenSchema(prdf.schema):_*)
  newDF.toDF(newDF.columns.map(_.replace(".", "_")): _*).printSchema

To get the names of the arrays I am trying:

df.schema.filter(st => st.dataType.isInstanceOf[ArrayType])
  .flatMap(_.dataType.asInstanceOf[StructType].fields)
  .map(_.name)

Any help is appreciated.


Solution

  • Here's a recursive method that extracts all nested ArrayType columns from a DataFrame:

    import org.apache.spark.sql.types._
    
    def extractArrayCols(schema: StructType, prefix: String): Seq[String] =
      schema.fields.flatMap {
        case StructField(name, struct: StructType, _, _) => extractArrayCols(struct, prefix + name + ".")
        case StructField(name, ArrayType(_, _), _, _) => Seq(s"$prefix$name")
        case _ => Seq.empty[String]
      }
    

    Testing the method:

    import org.apache.spark.sql.functions._
    import spark.implicits._  // needed for .toDF on a local Seq (already in scope in spark-shell)
    
    case class W(u: Int, v: Seq[String])
    
    val df = Seq(
      (10, Seq(1, 2), W(1, Seq("a", "b"))),
      (20, Seq(3), W(2, Seq("c", "d")))
    ).toDF("c1", "c2", "c3")
    
    val arrayCols = extractArrayCols(df.schema, "")
    // arrayCols: Seq[String] = ArraySeq(c2, c3.v)
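
    As a follow-up sketch (not part of the original answer), here is one way to use the extracted names to explode every array, which was the stated goal of the question. It assumes the df and arrayCols values from the test above; the "_exploded" column-name suffix is an arbitrary choice. Note that chaining several explodes like this produces one row per combination of array elements.

    import org.apache.spark.sql.functions.{col, explode}
    
    // Add one exploded copy of each array column found by extractArrayCols.
    // Nested paths such as "c3.v" resolve via col(), and dots are replaced
    // so the new column name is flat.
    val exploded = arrayCols.foldLeft(df) { (acc, name) =>
      acc.withColumn(name.replace(".", "_") + "_exploded", explode(col(name)))
    }
    
    exploded.printSchema
    // adds c2_exploded and c3_v_exploded as element-typed columns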