I am trying to implement a window trigger in Flink that fires when an average crosses a threshold.
The streaming data consists of a student name and a mark, separated by a comma. Whenever a student's average mark crosses 90, the window must fire, regardless of the number of attempts.
Sample data:
Fred,88
Fred,91
Wilma,93
.
.
Current Flink code:
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.triggers.Trigger.TriggerContext
import org.apache.flink.streaming.api.windowing.triggers.{CountTrigger, PurgingTrigger, Trigger, TriggerResult}
import org.apache.flink.streaming.api.windowing.windows.{GlobalWindow, Window}
case class Marks(name : String, mark : Double, count : Int)
class MarksTrigger[W <: Window] extends Trigger[Marks, W] {
  override def onElement(element: Marks, timestamp: Long, window: W, ctx: TriggerContext): TriggerResult = {
    if (element.mark > 90) TriggerResult.FIRE // fire if avg mark is > 90
    else TriggerResult.CONTINUE
  }
  override def onProcessingTime(time: Long, window: W, ctx: TriggerContext): TriggerResult = {
    TriggerResult.CONTINUE
  }
  override def onEventTime(time: Long, window: W, ctx: TriggerContext): TriggerResult = {
    TriggerResult.CONTINUE
  }
  override def clear(window: W, ctx: TriggerContext): Unit = {} // no trigger state to clean up
}
object Main {
  def main(args: Array[String]) {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val data = env.socketTextStream("localhost", 9999)
    val fdata = data.map { values =>
      val columns = values.split(",")
      Marks(columns(0), columns(1).toDouble, 1)
    }
    val keyed = fdata.keyBy(_.name)
      .window(GlobalWindows.create())
      .trigger(new MarksTrigger[GlobalWindow]())
      // TODO: a window function (reduce/aggregate/apply) is still missing here,
      // so keyed.print() below does not compile yet
    keyed.print()
    env.execute()
  }
}
To compute the average, I tried the following in batch mode:
case class Marks(name : String, mark : Double, count : Int)
// benv is the batch ExecutionEnvironment (as predefined in the Flink Scala shell)
val data = benv.fromElements(("Fred", 88.0), ("Fred", 95.0), ("Fred", 91.0), ("Wilma", 93.0), ("Wilma", 95.0), ("Wilma", 98.0))
data.map(x => (x._1, x._2, 1)).groupBy(0).reduce { (x, y) =>
  (x._1, x._2 + y._2, x._3 + y._3)
}.map(x => Marks(x._1, x._2 / x._3, x._3)).collect
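For the data above, collect returns one Marks record per student, holding the overall average and the number of attempts: roughly Marks(Fred, 91.33, 3) and Marks(Wilma, 95.33, 3), in no guaranteed order.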
How do I tie these together? Should .window() and .trigger() be called before the average is computed, or should the average be computed inside onElement()?
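For the second option, keeping the running sum and count inside the trigger itself is also possible via partitioned state obtained from the TriggerContext. The sketch below is only an illustration, not a tested solution: AvgTrigger is a hypothetical name, and the ReducingState pattern is the one used by Flink's built-in CountTrigger. It reuses the Marks case class from above.
import org.apache.flink.api.common.functions.ReduceFunction
import org.apache.flink.api.common.state.ReducingStateDescriptor
import org.apache.flink.streaming.api.windowing.triggers.Trigger.TriggerContext
import org.apache.flink.streaming.api.windowing.triggers.{Trigger, TriggerResult}
import org.apache.flink.streaming.api.windowing.windows.Window
class AvgTrigger[W <: Window] extends Trigger[Marks, W] {
  // running sum of marks and running attempt count, stored per key and window
  private val sumDesc = new ReducingStateDescriptor[java.lang.Double](
    "mark-sum",
    new ReduceFunction[java.lang.Double] {
      override def reduce(a: java.lang.Double, b: java.lang.Double): java.lang.Double = a + b
    },
    classOf[java.lang.Double])
  private val countDesc = new ReducingStateDescriptor[java.lang.Long](
    "mark-count",
    new ReduceFunction[java.lang.Long] {
      override def reduce(a: java.lang.Long, b: java.lang.Long): java.lang.Long = a + b
    },
    classOf[java.lang.Long])
  override def onElement(element: Marks, timestamp: Long, window: W, ctx: TriggerContext): TriggerResult = {
    val sum = ctx.getPartitionedState(sumDesc)
    val count = ctx.getPartitionedState(countDesc)
    sum.add(element.mark)
    count.add(1L)
    // fire once the running average for this student crosses 90
    if (sum.get() / count.get() > 90) TriggerResult.FIRE else TriggerResult.CONTINUE
  }
  override def onProcessingTime(time: Long, window: W, ctx: TriggerContext): TriggerResult = TriggerResult.CONTINUE
  override def onEventTime(time: Long, window: W, ctx: TriggerContext): TriggerResult = TriggerResult.CONTINUE
  override def clear(window: W, ctx: TriggerContext): Unit = {
    ctx.getPartitionedState(sumDesc).clear()
    ctx.getPartitionedState(countDesc).clear()
  }
}
With this variant the averaging never leaves the trigger, so the window contents remain raw Marks records and the window function downstream would have to aggregate them itself.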
Answer 0 (score: 1)
I found a solution:
import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.assigners.GlobalWindows
import org.apache.flink.streaming.api.windowing.time.Time
import org.apache.flink.streaming.api.windowing.triggers.Trigger.TriggerContext
import org.apache.flink.streaming.api.windowing.triggers.{CountTrigger, PurgingTrigger, Trigger, TriggerResult}
import org.apache.flink.streaming.api.windowing.windows.{GlobalWindow, Window}
class MarksTrigger[W <: Window] extends Trigger[Marks, W] {
  override def onElement(element: Marks, timestamp: Long, window: W, ctx: TriggerContext): TriggerResult = {
    // the trigger fires once a student's average mark crosses 90
    if (element.mark > 90) TriggerResult.FIRE
    else TriggerResult.CONTINUE
  }
  override def onProcessingTime(time: Long, window: W, ctx: TriggerContext): TriggerResult = {
    TriggerResult.CONTINUE
  }
  override def onEventTime(time: Long, window: W, ctx: TriggerContext): TriggerResult = {
    TriggerResult.CONTINUE
  }
  override def clear(window: W, ctx: TriggerContext): Unit = {} // no trigger state to clean up
}
case class Marks(name : String, mark : Double, count : Int)
object Main {
  def main(args: Array[String]) {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    val data = env.socketTextStream("localhost", 9999)
    // data is obtained in "name,mark" format
    val fdata = data.map { values =>
      val columns = values.split(",")
      (columns(0), columns(1).toDouble, 1)
    }
    // calculating average mark and number of exam attempts per student
    val keyed1 = fdata.keyBy(0).reduce { (x, y) =>
      (x._1, x._2 + y._2, x._3 + y._3)
    }.map(x => Marks(x._1, x._2 / x._3, x._3))
    // window the running averages; PurgingTrigger clears the window contents
    // after each firing, and maxBy(1) emits the record with the highest average
    val keyed = keyed1.keyBy(_.name)
      .window(GlobalWindows.create())
      .trigger(PurgingTrigger.of(new MarksTrigger[GlobalWindow]()))
      .maxBy(1)
    keyed.print()
    env.execute()
  }
}
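To try the job out, the socket source can be fed with netcat (for example, nc -lk 9999 started before the program). Tracing the pipeline with the marks from the batch example: "Fred,88" yields a running average of 88.0 and nothing fires; "Fred,95" raises Fred's average to 91.5, the trigger fires, and Marks(Fred,91.5,2) is printed. Because MarksTrigger is wrapped in PurgingTrigger, the window contents are dropped after every firing, while the keyed reduce upstream keeps accumulating the overall running average and attempt count.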