I tried to generate a Spark query in this way:
/** Appends 21 standardized columns std1..std21 to `withrul`, where
  * std<i> = s<i> - (a<i> / sd<i>), falling back to a<i> / AZ when the
  * division yields null (Spark returns null for division by null or zero).
  *
  * @param sqLContext the active SQLContext (its implicits are imported)
  * @param withrul    input DataFrame; must contain columns s<i>, a<i>, sd<i> for i in 1..21
  * @return the input DataFrame with all of its original columns plus std1..std21
  */
def stdizedOperationmode(sqLContext: SQLContext, withrul: DataFrame): DataFrame = {
  // see http://spark.apache.org/docs/latest/sql-programming-guide.html
  import sqLContext.implicits._

  // Tiny epsilon used as a stand-in denominator when sd<i> is null or 0,
  // so the row's std value is large-but-defined instead of null.
  val AZ: Column = lit(0.00000001)

  // std<id> = s<id> - coalesce(a<id>/sd<id>, a<id>/AZ)
  def opMode(id: Int): Column = {
    // FIX: AZ is already a Column — do not wrap it in lit(...) again.
    // lit(lit(...)) is redundant at best and fails on older Spark versions.
    (column("s" + id) - coalesce(column("a" + id) / column("sd" + id), column("a" + id) / AZ))
      .as("std" + id)
  }

  // The 21 generated std<i> columns.
  val stdColumns: IndexedSeq[Column] = (1 to 21).map(opMode)

  // FIX: the original select(columns:_*) dropped every pre-existing column.
  // col("*") expands to all current columns; prepend it (with +:) so the
  // result keeps everything and appends std1..std21.
  withrul.select(col("*") +: stdColumns: _*)
}
Question: how do I also keep "all other columns" (`*`)? Answer: prepend the star column to the generated ones — `withrul.select(col("*") +: columns: _*)`. Note that `+:` prepends a single element to the Seq (whereas `:+` appends), and `'*` as a Symbol does not expand to all columns; use `col("*")`.