|
1 | 1 | package file_segment_analytics; |
2 | 2 |
|
3 | 3 | import java.io.InputStream; |
4 | | -import java.sql.Timestamp; |
| 4 | +import java.io.Serializable; |
5 | 5 | import java.time.Duration; |
6 | 6 | import java.time.Instant; |
7 | 7 | import java.util.Properties; |
8 | 8 |
|
9 | 9 | import org.apache.flink.api.common.eventtime.WatermarkStrategy; |
10 | 10 | import org.apache.flink.api.common.typeinfo.TypeInformation; |
| 11 | +import org.apache.flink.api.java.functions.KeySelector; |
| 12 | +import org.apache.flink.api.java.tuple.Tuple2; |
| 13 | +import org.apache.flink.api.java.typeutils.TypeExtractor; |
11 | 14 | import org.apache.flink.connector.jdbc.JdbcConnectionOptions; |
12 | | -import org.apache.flink.connector.jdbc.JdbcExecutionOptions; |
13 | | -import org.apache.flink.connector.jdbc.JdbcSink; |
14 | 15 | import org.apache.flink.connector.kafka.source.KafkaSource; |
15 | 16 | import org.apache.flink.formats.json.JsonDeserializationSchema; |
16 | | -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.core.JsonProcessingException; |
17 | | -import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper; |
18 | 17 | import org.apache.flink.streaming.api.datastream.DataStream; |
19 | 18 | import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; |
20 | 19 | import org.apache.flink.streaming.api.functions.sink.SinkFunction; |
|
23 | 22 | import org.apache.flink.streaming.api.windowing.time.Time; |
24 | 23 |
|
25 | 24 | import models.EnrichedFileSegment; |
| 25 | +import sinks.JdbcSinkFactory; |
| 26 | +import stats.UserAggregateStat; |
| 27 | +import stats.UserRollingStat; |
| 28 | +import stats.UserProjectAggregateStat; |
| 29 | +import stats.UserProjectRollingStat; |
| 30 | +import stats.StatFactory; |
26 | 31 |
|
27 | 32 | public class FileSegmentAnalyticsJob { |
28 | 33 | public static void main(String[] args) throws Exception { |
@@ -52,90 +57,96 @@ public static void main(String[] args) throws Exception { |
52 | 57 | })); |
53 | 58 |
|
54 | 59 | DataStream<UserAggregateStat> dailyStream = timestampedStream.keyBy(EnrichedFileSegment::getUser_id) |
55 | | - .window(TumblingEventTimeWindows.of(Time.days(1))).process(new UserStatWindowFunction("daily")); |
56 | | - |
57 | | - DataStream<UserAggregateStat> rollingStream = timestampedStream.keyBy(EnrichedFileSegment::getUser_id) |
58 | | - .window(SlidingEventTimeWindows.of(Time.hours(24), Time.seconds(10))) // TODO: Change this beheavior to |
59 | | - // 5 |
60 | | - // minutes I guess? |
61 | | - .process(new UserStatWindowFunction("rolling_24h")); |
| 60 | + .window(TumblingEventTimeWindows.of(Time.days(1))) |
| 61 | + .process( |
| 62 | + new UserStatWindowFunction<Integer, UserAggregateStat>("daily", new UserAggregateStatFactory())) |
| 63 | + .returns(TypeExtractor.getForClass(UserAggregateStat.class)); |
| 64 | + |
| 65 | + DataStream<UserRollingStat> rollingStream = timestampedStream.keyBy(EnrichedFileSegment::getUser_id) |
| 66 | + .window(SlidingEventTimeWindows.of(Time.hours(24), Time.seconds(10))) |
| 67 | + .process(new UserStatWindowFunction<Integer, UserRollingStat>("rolling_24h", |
| 68 | + new UserRollingStatFactory())) |
| 69 | + .returns(TypeExtractor.getForClass(UserRollingStat.class)); |
| 70 | + |
| 71 | + DataStream<UserProjectRollingStat> projectsRollingStream = timestampedStream |
| 72 | + .keyBy(new KeySelector<EnrichedFileSegment, Tuple2<Integer, String>>() { |
| 73 | + @Override |
| 74 | + public Tuple2<Integer, String> getKey(EnrichedFileSegment seg) throws Exception { |
| 75 | + return Tuple2.of(seg.getUser_id(), seg.getProject_path()); |
| 76 | + } |
| 77 | + }).window(SlidingEventTimeWindows.of(Time.hours(24), Time.seconds(10))) |
| 78 | + .process(new UserStatWindowFunction<Tuple2<Integer, String>, UserProjectRollingStat>("rolling_24h", |
| 79 | + new UserProjectRollingStatFactory())) |
| 80 | + .returns(TypeExtractor.getForClass(UserProjectRollingStat.class)); |
| 81 | + |
| 82 | + DataStream<UserProjectAggregateStat> projectsAggregateStream = timestampedStream |
| 83 | + .keyBy(new KeySelector<EnrichedFileSegment, Tuple2<Integer, String>>() { |
| 84 | + @Override |
| 85 | + public Tuple2<Integer, String> getKey(EnrichedFileSegment seg) throws Exception { |
| 86 | + return Tuple2.of(seg.getUser_id(), seg.getProject_path()); |
| 87 | + } |
| 88 | + }).window(TumblingEventTimeWindows.of(Time.hours(24))) |
| 89 | + .process(new UserStatWindowFunction<Tuple2<Integer, String>, UserProjectAggregateStat>("daily", |
| 90 | + new UserProjectAggregateStatFactory())) |
| 91 | + .returns(TypeExtractor.getForClass(UserProjectAggregateStat.class)); |
| 92 | + |
| 93 | + // Use stat type definitions for columns and conflict keys |
62 | 94 |
|
63 | 95 | JdbcConnectionOptions jdbcOptions = new JdbcConnectionOptions.JdbcConnectionOptionsBuilder() |
64 | 96 | .withUrl("jdbc:postgresql://postgres_db:5432/myapp").withDriverName("org.postgresql.Driver") |
65 | 97 | .withUsername("admin").withPassword("secure_password").build(); |
66 | 98 |
|
67 | | - dailyStream.addSink(createJdbcSink(jdbcOptions, 1, 0, false)); |
68 | | - |
69 | | - rollingStream.addSink(createJdbcSink(jdbcOptions, 1, 0, true)); |
| 99 | + // Sinks using asRecord (createGeneralSink) |
| 100 | + SinkFunction<UserAggregateStat> dailySink = JdbcSinkFactory.createGeneralSink("user_stats_aggregate", |
| 101 | + UserAggregateStat.PRIMITIVE_COLUMNS, UserAggregateStat.JSONB_COLUMNS, |
| 102 | + String.join(", ", UserAggregateStat.CONFLICT_KEYS), jdbcOptions, 10, 1000); |
| 103 | + SinkFunction<UserRollingStat> rollingSink = JdbcSinkFactory.createGeneralSink("user_stats_rolling", |
| 104 | + UserRollingStat.PRIMITIVE_COLUMNS, UserRollingStat.JSONB_COLUMNS, |
| 105 | + String.join(", ", UserRollingStat.CONFLICT_KEYS), jdbcOptions, 10, 1000); |
| 106 | + SinkFunction<UserProjectAggregateStat> projectSink = JdbcSinkFactory.createGeneralSink( |
| 107 | + "user_project_stats_aggregate", UserProjectAggregateStat.PRIMITIVE_COLUMNS, |
| 108 | + UserProjectAggregateStat.JSONB_COLUMNS, String.join(", ", UserProjectAggregateStat.CONFLICT_KEYS), |
| 109 | + jdbcOptions, 10, 1000); |
| 110 | + SinkFunction<UserProjectRollingStat> projectRollingSink = JdbcSinkFactory.createGeneralSink( |
| 111 | + "user_project_stats_rolling", UserProjectRollingStat.PRIMITIVE_COLUMNS, |
| 112 | + UserProjectRollingStat.JSONB_COLUMNS, String.join(", ", UserProjectRollingStat.CONFLICT_KEYS), |
| 113 | + jdbcOptions, 10, 1000); |
| 114 | + |
| 115 | + dailyStream.addSink(dailySink); |
| 116 | + rollingStream.addSink(rollingSink); |
| 117 | + projectsAggregateStream.addSink(projectSink); |
| 118 | + projectsRollingStream.addSink(projectRollingSink); |
70 | 119 |
|
71 | 120 | env.execute("FileSegment Analytics"); |
72 | 121 | } |
73 | 122 |
|
74 | | - private static SinkFunction<UserAggregateStat> createJdbcSink(JdbcConnectionOptions jdbcOptions, int batchSize, |
75 | | - long batchIntervalMs, boolean rolling) { |
76 | | - String table = rolling ? "user_stats_rolling" : "user_stats_aggregate"; |
77 | | - String conflict = rolling ? "(user_id, window_type)" : "(user_id, window_type, window_start)"; |
78 | | - |
79 | | - String columns = "user_id, window_type, lang_durations, machine_durations, editor_durations, project_durations, activity_durations"; |
80 | | - String values = "?, ?, ?::jsonb, ?::jsonb, ?::jsonb, ?::jsonb, ?::jsonb"; |
| 123 | + public static class UserAggregateStatFactory implements StatFactory<Integer, UserAggregateStat>, Serializable { |
| 124 | + @Override |
| 125 | + public UserAggregateStat create(Integer key) { |
| 126 | + return new UserAggregateStat(key); |
| 127 | + } |
| 128 | + } |
81 | 129 |
|
82 | | - if (!rolling) { |
83 | | - columns += ", window_start, window_end"; |
84 | | - values += ", ?, ?"; |
| 130 | + public static class UserRollingStatFactory implements StatFactory<Integer, UserRollingStat>, Serializable { |
| 131 | + @Override |
| 132 | + public UserRollingStat create(Integer key) { |
| 133 | + return new UserRollingStat(key); |
85 | 134 | } |
| 135 | + } |
86 | 136 |
|
87 | | - String sql; |
88 | | - if (rolling) { |
89 | | - sql = "INSERT INTO user_stats_rolling (" |
90 | | - + "user_id, window_type, lang_durations, machine_durations, editor_durations, " |
91 | | - + "project_durations, activity_durations) " |
92 | | - + "VALUES (?, ?, ?::jsonb, ?::jsonb, ?::jsonb, ?::jsonb, ?::jsonb) " |
93 | | - + "ON CONFLICT (user_id, window_type) DO UPDATE SET " + "lang_durations = EXCLUDED.lang_durations, " |
94 | | - + "machine_durations = EXCLUDED.machine_durations, " |
95 | | - + "editor_durations = EXCLUDED.editor_durations, " |
96 | | - + "project_durations = EXCLUDED.project_durations, " |
97 | | - + "activity_durations = EXCLUDED.activity_durations, " + "updated_at = NOW();"; |
98 | | - } else { |
99 | | - sql = "INSERT INTO user_stats_aggregate (" |
100 | | - + "user_id, window_type, lang_durations, machine_durations, editor_durations, " |
101 | | - + "project_durations, activity_durations, window_start, window_end) " |
102 | | - + "VALUES (?, ?, ?, ?, ?::jsonb, ?::jsonb, ?::jsonb, ?::jsonb, ?::jsonb) " |
103 | | - + "ON CONFLICT (user_id, window_type, window_start) DO UPDATE SET " |
104 | | - + "window_end = EXCLUDED.window_end, " + "lang_durations = EXCLUDED.lang_durations, " |
105 | | - + "machine_durations = EXCLUDED.machine_durations, " |
106 | | - + "editor_durations = EXCLUDED.editor_durations, " |
107 | | - + "project_durations = EXCLUDED.project_durations, " |
108 | | - + "activity_durations = EXCLUDED.activity_durations, " + "updated_at = NOW();"; |
| 137 | + public static class UserProjectAggregateStatFactory |
| 138 | + implements StatFactory<Tuple2<Integer, String>, UserProjectAggregateStat>, Serializable { |
| 139 | + @Override |
| 140 | + public UserProjectAggregateStat create(Tuple2<Integer, String> key) { |
| 141 | + return new UserProjectAggregateStat(key.f0, key.f1); |
109 | 142 | } |
| 143 | + } |
110 | 144 |
|
111 | | - return JdbcSink.sink(sql, (ps, stat) -> { |
112 | | - ObjectMapper mapper = new ObjectMapper(); |
113 | | - UserAggregateStat userStat = (UserAggregateStat) stat; |
114 | | - ps.setInt(1, userStat.getUserId()); |
115 | | - ps.setString(2, stat.getWindowType()); |
116 | | - |
117 | | - if (!rolling) { |
118 | | - ps.setTimestamp(8, Timestamp.from(userStat.getWindowStart())); |
119 | | - ps.setTimestamp(9, Timestamp.from(userStat.getWindowEnd())); |
120 | | - } |
121 | | - |
122 | | - try { |
123 | | - // 5-9: Serialize Map fields to JSON strings |
124 | | - ps.setString(3, mapper.writeValueAsString(userStat.getLangDurations())); |
125 | | - ps.setString(4, mapper.writeValueAsString(userStat.getMachineDurations())); |
126 | | - ps.setString(5, mapper.writeValueAsString(userStat.getEditorDurations())); |
127 | | - ps.setString(6, mapper.writeValueAsString(userStat.getProjectDurations())); |
128 | | - ps.setString(7, mapper.writeValueAsString(userStat.getActivityDurations())); |
129 | | - } catch (JsonProcessingException e) { |
130 | | - // Handle serialization error gracefully |
131 | | - e.printStackTrace(); |
132 | | - for (int i = 3; i <= 7; i++) |
133 | | - ps.setString(i, "{}"); // Send empty JSON object on error |
134 | | - } |
135 | | - }, |
136 | | - // Define execution options (batching) |
137 | | - JdbcExecutionOptions.builder().withBatchSize(batchSize).withBatchIntervalMs(batchIntervalMs).build(), |
138 | | - // Provide JDBC connection details |
139 | | - jdbcOptions); |
| 145 | + public static class UserProjectRollingStatFactory |
| 146 | + implements StatFactory<Tuple2<Integer, String>, UserProjectRollingStat>, Serializable { |
| 147 | + @Override |
| 148 | + public UserProjectRollingStat create(Tuple2<Integer, String> key) { |
| 149 | + return new UserProjectRollingStat(key.f0, key.f1); |
| 150 | + } |
140 | 151 | } |
141 | 152 | } |
0 commit comments