package myfunctions

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import com.teradata.aster.spark._
import com.teradata.aster.spark.DataRow.DataRow
import org.apache.spark.rdd.RDD
import com.teradata.aster.spark.DataRow._
import org.apache.spark.Logging

/** The UserEcho class is an example wrapper function that uses the Aster-Spark API
  * and simply returns the input RDD as output. It mimics the behavior of an echo
  * function and derives from the base class [[AsterSparkFunctionDR]], which hides
  * all the interactions with Aster.
  *
  * The UserEcho class overrides the `run` method to implement the echo functionality.
  *
  * @param args command-line arguments forwarded to the Aster-Spark framework
  * @param name the registered name of this Spark function
  * @param mstr Spark master URL; `null` default is kept for backward compatibility
  *             with the AsterSparkFunctionDR API (NOTE(review): an Option[String]
  *             would be more idiomatic — confirm against the base-class contract)
  */
class UserEcho(args: Array[String], name: String, mstr: String = null)
    extends AsterSparkFunctionDR(args, name, mstr) {

  /** Implements the echo functionality: the input is returned unchanged.
    *
    * @param input           the input RDD that reads data from the source,
    *                        of type `RDD[DataRow]`
    * @param sparkFunctParams string containing the parameters specific to the
    *                        function the user is implementing; unused in this
    *                        example
    * @return the input RDD, returned unmodified to Aster by the Aster-Spark
    *         framework
    */
  override def run(input: RDD[DataRow], sparkFunctParams: String): RDD[DataRow] =
    input
}

/** Companion object providing the application entry point. */
object UserEcho extends Logging {

  /** Entry point: constructs the echo function wrapper and hands control to
    * the Aster-Spark framework, which drives `run`.
    */
  def main(args: Array[String]): Unit = {
    val asf = new UserEcho(args, "UserEcho")
    asf.runAsterSparkFunction()
  }
}