My requirement is to cast all Decimal data type in DataFrame to String. Logic is working fine with simple type but not working with ArrayType. Here is logic :-
var df = spark.sql("select * from test_1")
for(dt <- df.dtypes) {
if(dt._2.substring(0,7) == "Decimal"){
df = df.withColumn(dt._1,df(dt._1).cast("String"))
}
}
But column within arrayType remains unchanged although, they are decimal type. please help , how can I loop through nested element and cast it to string. This is schema of my dataframe:
scala> df.schema res77: org.apache.spark.sql.types.StructType = StructType(StructField(mstr_prov_id,StringType,true), StructField(prov_ctgry_cd,StringType,true), StructField(prov_orgnl_efctv_dt,TimestampType,true), StructField(prov_trmntn_dt,TimestampType,true), StructField(prov_trmntn_rsn_cd,StringType,true), StructField(npi_rqrd_ind,StringType,true), StructField(prov_stts_aray_txt,ArrayType(StructType(StructField(PROV_STTS_KEY,DecimalType(22,0),true), StructField(PROV_STTS_EFCTV_DT,TimestampType,true), StructField(PROV_STTS_CD,StringType,true), StructField(PROV_STTS_TRMNTN_DT,TimestampType,true), StructField(PROV_STTS_TRMNTN_RSN_CD,StringType,true)),true),true))