I have an array of dates like [2014-11-08 06:27:00.0], and I would like to remove the brackets so that I'm left with 2014-11-08 06:27:00.0. Here is my code:
import java.text.SimpleDateFormat
import java.util.TimeZone
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.sql.cassandra.CassandraSQLContext
import com.datastax.spark.connector._

val conf = new SparkConf(true)
  .set("spark.cassandra.connection.host", "127.0.0.1")
  .set("spark.cassandra.connection.port", "9042")
  .set("spark.driver.allowMultipleContexts", "true")
  .set("spark.streaming.receiver.writeAheadLog.enable", "true")
  .setAppName("CasteDate")
  .setMaster("local[*]")
val sc = new SparkContext(conf)
val ssc = new StreamingContext(sc, Seconds(1))
val csc=new CassandraSQLContext(sc)
val sqlContext = new org.apache.spark.sql.SQLContext(sc)
var input: SimpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.S")
input.setTimeZone(TimeZone.getTimeZone("GMT"))
var dia: SimpleDateFormat = new SimpleDateFormat("dd")
var mes: SimpleDateFormat = new SimpleDateFormat("MM")
var ano: SimpleDateFormat = new SimpleDateFormat("yyyy")
var horas: SimpleDateFormat = new SimpleDateFormat("HH")
var minutos: SimpleDateFormat = new SimpleDateFormat("mm")
val data=csc.sql("SELECT timecol from smartgrids.analyzer_temp").collect()
import sqlContext.implicits._
val result = data.map(row => {
  val day = dia.format(input.parse(row.toString()))
  val month = mes.format(input.parse(row.toString()))
  val year = ano.format(input.parse(row.toString()))
  val hour = horas.format(input.parse(row.toString()))
  val minute = minutos.format(input.parse(row.toString()))
})
val collection = sc.parallelize(Seq(("day", 2), ("month", 2), ("year", 4), ("hour", 2), ("minute", 2)))
collection.saveToCassandra("features", "datepart", SomeColumns("day", "month", "year", "hour", "minute"))
sc.stop()
After running the code, I get this error:
java.text.ParseException: Unparseable date: "[2015-08-20 21:01:00.0]"
at java.text.DateFormat.parse(DateFormat.java:366)
I think the error happens because I'm parsing the date while it still has the brackets around it, so I want to remove them. Can you help me, please?
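Is reading the column value straight from the Row (instead of calling row.toString()) the right way to avoid the brackets? Below is a minimal sketch of what I have in mind; the field index 0 and the get(0) accessor are my assumptions, since timecol is the only column I select:

// Sketch of what I'm considering (not tested): read the column directly so the
// value is not wrapped in Row's "[...]" string representation.
val result = data.map { row =>
  val raw = row.get(0).toString                    // e.g. "2015-08-20 21:01:00.0"
  // alternative: row.toString().stripPrefix("[").stripSuffix("]")
  val parsed = input.parse(raw)
  (dia.format(parsed), mes.format(parsed), ano.format(parsed),
    horas.format(parsed), minutos.format(parsed))  // (day, month, year, hour, minute)
}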