import java.util

import org.apache.hadoop.io.{BytesWritable, NullWritable}
import org.apache.spark.rdd.RDD
import org.tensorflow.example.{Example, Feature, Features, FloatList, Int64List}
import org.tensorflow.hadoop.io.TFRecordFileOutputFormat
// Wrap a list of longs in an Int64List and build the corresponding Feature.
def processLongArrayFeature(data: java.lang.Iterable[_ <: java.lang.Long]): Feature = {
  val int_list = Int64List.newBuilder.addAllValue(data).build
  Feature.newBuilder.setInt64List(int_list).build
}
// Wrap a list of floats in a FloatList and build the corresponding Feature.
def processFloatArrayFeature(data: java.lang.Iterable[java.lang.Float]): Feature = {
  val float_list = FloatList.newBuilder.addAllValue(data).build
  Feature.newBuilder.setFloatList(float_list).build
}
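
// For reference, the elided row-handling ("//..." below) typically converts the
// DataFrame's array columns into the Java lists these helpers expect. A
// hypothetical sketch, assuming an array<bigint> column; the column name and
// helper are illustrative, not part of the original:
import scala.collection.JavaConverters._
def longsFromRow(row: org.apache.spark.sql.Row, col: String): java.util.List[java.lang.Long] =
  row.getAs[Seq[Long]](col).map(l => java.lang.Long.valueOf(l)).asJava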
val feature_RDD: RDD[(BytesWritable, NullWritable)] = feature_DF.rdd.map(row => {
  //...
  val long_array = new util.ArrayList[java.lang.Long]()
  val float_array = new util.ArrayList[java.lang.Float]()
  // Assemble the per-row feature map, keyed by feature name.
  val feature_builder: Features.Builder = Features.newBuilder()
  feature_builder
    .putFeature("float_array", processFloatArrayFeature(float_array))
    .putFeature("long_array", processLongArrayFeature(long_array))
  // Serialize the Example proto: the bytes become the record key, the value a NullWritable.
  val result = Example.newBuilder.setFeatures(feature_builder.build).build.toByteArray
  (new BytesWritable(result), NullWritable.get())
}).coalesce(100)
feature_RDD.saveAsNewAPIHadoopFile(
  the_path,
  classOf[BytesWritable],
  classOf[NullWritable],
  classOf[TFRecordFileOutputFormat],
  sparkContext.hadoopConfiguration
)
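
// To sanity-check the output, the same connector's TFRecordFileInputFormat can
// read the records back and parse each Example proto. A minimal sketch,
// assuming the_path and sparkContext from above:
import org.tensorflow.hadoop.io.TFRecordFileInputFormat
val examples = sparkContext
  .newAPIHadoopFile(the_path, classOf[TFRecordFileInputFormat], classOf[BytesWritable], classOf[NullWritable])
  .map { case (bytes, _) => Example.parseFrom(bytes.copyBytes()) }
// Inspect one record, e.g. its long_array feature.
examples.take(1).foreach(ex => println(ex.getFeatures.getFeatureMap.get("long_array")))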