// --- 过去一个星期每日影像下载数量 (daily image download counts for the past week) ---
/** Daily image download counts for the past week, pushed to Postgres as JSON.
  *
  * The UNION ALL has two halves: the `calendar` half emits a zero row for
  * every day in the window (so days with no activity still appear in the
  * series), and the `products` half counts rows created in the last 7 days,
  * bucketed by day. The outer SUM merges the two, yielding the real count
  * per day (or 0). NOTE(review): the calendar bound (`datelist > date_sub(.., 7)`)
  * and the products bound (`datediff(..) < 7`) use slightly different window
  * definitions — confirm the edges line up as intended.
  *
  * @param spark active session; tables `calendar` and `products` and the
  *              sibling helper `dataFrame2Json` must be available
  */
def NumberOfTiffDownWeek(spark: SparkSession): Unit = {
  // Query text kept verbatim; stripMargin removes everything up to each '|'.
  val weeklyDownloadsSql =
    """
      |SELECT
      | date(dday) time,
      | sum(ct) as num
      |FROM
      | (
      | SELECT
      | datelist as dday,
      | 0 as ct
      | FROM
      | calendar
      | where datelist>date_sub(current_timestamp,7) and datelist<current_timestamp
      | UNION ALL
      | select
      | date_format(created_at,'yyyy-MM-dd') as dday,
      | count(1) as ct
      | from products
      | where datediff(current_timestamp, created_at)<7
      | group by date_format(created_at,'yyyy-MM-dd')
      | ) a
      |GROUP BY time
      |order by time
      |""".stripMargin

  val weeklyDownloads: DataFrame = spark.sql(weeklyDownloadsSql)
  // Cast `time` to a plain string so the JSON serialization emits yyyy-MM-dd
  // text rather than a date object representation.
  val serializable: DataFrame =
    weeklyDownloads.select(weeklyDownloads.col("time").cast(StringType), weeklyDownloads.col("num"))
  dataFrame2Json(serializable, "过去一个星期每日影像下载数量")
  println("过去一个星期每日影像下载数量已更新至postgres")
}
// --- 过去一个月每日影像入库景数 (daily ingested image scene counts for the past month) ---
/** Daily ingested image scene counts for the past month, pushed to Postgres as JSON.
  *
  * Mirrors the weekly-download report but over a 30-day window against
  * `image_package.storage_at`: the `calendar` branch zero-fills every day in
  * the window, the `image_package` branch counts scenes stored per day, and
  * the outer SUM combines them so gap days still show up with count 0.
  * NOTE(review): calendar bound (`datelist > date_sub(.., 30)`) vs. the
  * `datediff(..) < 30` bound — confirm the window edges match expectations.
  *
  * @param spark active session; tables `calendar` and `image_package` and
  *              the sibling helper `dataFrame2Json` must be available
  */
def TotalNumTiffEveryDay(spark: SparkSession): Unit = {
  // Query text kept verbatim; stripMargin removes everything up to each '|'.
  val monthlyIngestSql =
    """
      |SELECT
      | date(dday) time,
      | sum(ct) num
      |FROM
      | (
      | SELECT
      | datelist as dday,
      | 0 as ct
      | FROM
      | calendar
      | where datelist>date_sub(current_timestamp,30) and datelist<current_timestamp
      | UNION ALL
      | select
      | date_format(storage_at,'yyyy-MM-dd') dday
      | ,count(1) ct
      | from image_package
      | where datediff(current_timestamp, storage_at)<30
      | GROUP by date_format(storage_at,'yyyy-MM-dd')
      | ) a
      |GROUP BY time
      |order by time
      |""".stripMargin

  val monthlyIngest: DataFrame = spark.sql(monthlyIngestSql)
  // Cast `time` to a plain string so the JSON serialization emits yyyy-MM-dd
  // text rather than a date object representation.
  val serializable: DataFrame =
    monthlyIngest.select(monthlyIngest.col("time").cast(StringType), monthlyIngest.col("num"))
  dataFrame2Json(serializable, "过去一个月每日影像入库景数")
  println("过去一个月每日影像入库景数已更新至postgres")
}