我正在搜索函数from_unixtime(bigint unixtime)的等效值,该函数存在于Spark和Flink-SQL中。
我的目标是转换这种格式: 1439799094
改为: 2015-08-17 08:11:34
发布于 2019-01-29 22:10:10
就用UDF吧!
https://ci.apache.org/projects/flink/flink-docs-stable/dev/table/udfs.html
示例使用
test.csv
creation_date|key
1535816823|1
1536392928|2
1536272308|3
EpochTimeConverter.scala
import java.time.format.DateTimeFormatter
import java.time.{Instant, LocalDateTime, ZoneId}
import org.apache.flink.table.functions.ScalarFunction
class EpochTimeConverter extends ScalarFunction {
  // DateTimeFormatter is immutable and thread-safe (per java.time docs),
  // so cache a single instance instead of rebuilding it on every row.
  private val timePattern = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")

  /** Converts an epoch time in seconds (INT column) to a
    * "yyyy-MM-dd HH:mm:ss" string in the JVM's default time zone. */
  def eval(epochTime: Int): String = eval(epochTime.toLong)

  /** Overload matching Hive/Spark `from_unixtime(bigint)`: accepts seconds
    * as a Long so BIGINT columns and post-2038 timestamps (which overflow
    * Int) are also handled. */
  def eval(epochTime: Long): String =
    timePattern.format(LocalDateTime.ofInstant(Instant.ofEpochSecond(epochTime), ZoneId.systemDefault()))
}
UdfExample.scala
import org.apache.flink.api.scala.{ExecutionEnvironment, _}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.api.{TableEnvironment, Types}
import org.apache.flink.table.sources.CsvTableSource
import org.apache.flink.types.Row
object UdfExample {

  /** Demonstrates registering and invoking a scalar UDF through Flink's
    * Table API SQL interface, reading rows from a local CSV file. */
  def main(args: Array[String]): Unit = {
    val environment = ExecutionEnvironment.getExecutionEnvironment
    val tables = TableEnvironment.getTableEnvironment(environment)

    // Describe test.csv: '|'-delimited, two INT columns, header skipped.
    val source = CsvTableSource
      .builder()
      .path("test.csv")
      .ignoreFirstLine()
      .fieldDelimiter("|")
      .field("creation_date", Types.INT)
      .field("key", Types.INT)
      .build()
    tables.registerTableSource("temp_table", source)

    // First query: raw epoch values, no UDF involved.
    println("Without udf:")
    tables.sqlQuery("SELECT creation_date, key FROM temp_table").toDataSet[Row].print()

    // Register the converter under the familiar Hive/Spark name and rerun.
    tables.registerFunction("from_unixtime", new EpochTimeConverter())
    println()
    println("With udf:")
    tables.sqlQuery("select from_unixtime(creation_date),key from temp_table").toDataSet[Row].print()
  }
}
如果运行UdfExample.scala,它将产生类似的输出,如下所示:
Without udf:
1535816823,1
1536272308,3
1536392928,2
With udf:
2018-09-01 18:47:03,1
2018-09-07 01:18:28,3
2018-09-08 10:48:48,2
https://stackoverflow.com/questions/52207823
复制相似问题