Browse Source

更新

hph_优化版本
1049970895@qniao.cn 3 years ago
parent
commit
b85fae8d01
5 changed files with 44 additions and 94 deletions
  1. 6
      root-cloud-mocker/src/main/java/com/qniao/iot/rc/RootCloudIotDataEventSourceMocker.java
  2. 6
      root-cloud-statistics/dependency-reduced-pom.xml
  3. 18
      root-cloud-statistics/pom.xml
  4. 18
      root-cloud-statistics/src/main/java/com/qniao/iot/rc/RootCloudIotDataFormatterJob.java
  5. 90
      root-cloud-statistics/src/test/java/com/qniao/iot/rc/CloudBoxEventJob.java

6
root-cloud-mocker/src/main/java/com/qniao/iot/rc/RootCloudIotDataEventSourceMocker.java

@@ -81,11 +81,11 @@ public class RootCloudIotDataEventSourceMocker {
private static Properties createKafkaProperties() {
Properties kafkaProps = new Properties();
// 本地环境
kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "SASL_PLAINTEXT://localhost:9093");
// kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:19093");
// 测试环境
//kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "172.29.115.145:9092");
// 正式环境
//kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "120.25.199.30:19092");
kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "120.25.199.30:19092");
kafkaProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getCanonicalName());
kafkaProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getCanonicalName());
@@ -94,7 +94,7 @@ public class RootCloudIotDataEventSourceMocker {
kafkaProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
kafkaProps.put(SaslConfigs.SASL_MECHANISM, "PLAIN");
kafkaProps.put("sasl.jaas.config",
"org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin-secret\";");
"org.apache.kafka.common.security.plain.PlainLoginModule required username=\"qnkafka\" password=\"qnkafkaonetwogo\";");

6
root-cloud-statistics/dependency-reduced-pom.xml

@@ -77,6 +77,12 @@
<version>2.17.2</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>1.1.12</version>
<scope>test</scope>
</dependency>
</dependencies>
<distributionManagement>
<repository>

18
root-cloud-statistics/pom.xml

@@ -159,6 +159,24 @@ under the License.
<artifactId>mysql-connector-java</artifactId>
<version>8.0.29</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.31</version>
</dependency>
<dependency>
<groupId>com.qniao</groupId>
<artifactId>ddd-event</artifactId>
<version>0.0.1-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
<version>4.1.42.Final</version>
</dependency>
</dependencies>
<build>

18
root-cloud-statistics/src/main/java/com/qniao/iot/rc/RootCloudIotDataFormatterJob.java

@@ -39,10 +39,15 @@ import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.filesystem.StreamingFileSink;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.config.SaslConfigs;
import java.math.BigDecimal;
import java.util.Objects;
import java.security.Provider;
import java.util.*;
/**
* Skeleton for a Flink DataStream Job.
@@ -74,7 +79,12 @@ public class RootCloudIotDataFormatterJob {
.setBootstrapServers(ApolloConfig.get(ConfigConstant.SOURCE_KAFKA_BOOTSTRAP_SERVERS))
.setTopics(ApolloConfig.get(ConfigConstant.SOURCE_KAFKA_TOPICS))
.setGroupId(ApolloConfig.get(ConfigConstant.SOURCE_KAFKA_GROUPID))
/*.setProperty(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT")
.setProperty(SaslConfigs.SASL_MECHANISM, "PLAIN")
.setProperty("sasl.jaas.config",
"org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin-secret\";")*/
.setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
//.setStartingOffsets(OffsetsInitializer.earliest())
.setValueOnlyDeserializer(new RootCloudIotDataReceiptedEventDeserializationSchema())
.build();
@@ -84,10 +94,16 @@ public class RootCloudIotDataFormatterJob {
.map((MapFunction<RootCloudIotDataReceiptedEvent, MachineIotDataReceivedEvent>) RootCloudIotDataFormatterJob::transform)
.name("Transform MachineIotDataReceivedEvent");
Properties kafkaProducerConfig = new Properties();
kafkaProducerConfig.setProperty(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SASL_PLAINTEXT");
kafkaProducerConfig.setProperty(SaslConfigs.SASL_MECHANISM, "PLAIN");
kafkaProducerConfig.setProperty("sasl.jaas.config",
"org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin-secret\";");
// 写入kafka
transformDs.sinkTo(
KafkaSink.<MachineIotDataReceivedEvent>builder()
.setBootstrapServers(ApolloConfig.get(ConfigConstant.SINK_KAFKA_BOOTSTRAP_SERVERS))
//.setKafkaProducerConfig(kafkaProducerConfig)
.setRecordSerializer(
KafkaRecordSerializationSchema.builder()
.setTopic(ApolloConfig.get(ConfigConstant.SINK_KAFKA_TOPICS))

90
root-cloud-statistics/src/test/java/com/qniao/iot/rc/CloudBoxEventJob.java

@@ -1,90 +0,0 @@
package com.qniao.iot.rc;
import cn.hutool.core.collection.ListUtil;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONUtil;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.RichFlatMapFunction;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import java.time.Duration;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.util.*;
/**
 * Test Flink job: consumes cloud-box history events from Kafka, keeps only events
 * carrying the cloud-box history key, parses each JSON body into a {@code Body} bean,
 * batches the beans in 2-second event-time tumbling windows keyed by data type, and
 * writes every non-empty batch to MySQL.
 */
public class CloudBoxEventJob {

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Exactly-once checkpoints every minute; watermarks advanced once per second.
        env.enableCheckpointing(60000L, CheckpointingMode.EXACTLY_ONCE);
        env.getConfig().setAutoWatermarkInterval(1000L);
        env.setParallelism(1);

        // Kafka source reading from the earliest committed offset.
        KafkaSource<CloudBoxDataHistoryEvent> source = KafkaSource.<CloudBoxDataHistoryEvent>builder()
                .setBootstrapServers("172.19.14.225:9092")
                .setTopics("data-message-channel-qn")
                .setGroupId("cloud_box_event_job")
                .setProperty(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "8000")
                .setStartingOffsets(OffsetsInitializer.earliest())
                .setValueOnlyDeserializer(new CloudBoxDataHistoryEventDeserializationSchema())
                .build();

        // Keep only events tagged with the cloud-box history key.
        SingleOutputStreamOperator<CloudBoxDataHistoryEvent> historyEvents = env
                .fromSource(source, WatermarkStrategy.forBoundedOutOfOrderness(Duration.ofSeconds(1)),
                        "cloudBoxDataHistoryEvent fromSource")
                .filter((FilterFunction<CloudBoxDataHistoryEvent>) event -> {
                    String key = event.getEventKey();
                    return StrUtil.isNotEmpty(key) && key.equals("qn_cloud_box_data_history");
                });
        historyEvents.print();

        // Parse each non-empty JSON body into a Body bean, stamping it with the
        // current wall-clock time in the UTC+8 zone as its event timestamp.
        SingleOutputStreamOperator<Body> bodies = historyEvents
                .flatMap(new RichFlatMapFunction<CloudBoxDataHistoryEvent, Body>() {
                    @Override
                    public void flatMap(CloudBoxDataHistoryEvent event, Collector<Body> out) {
                        String payload = event.getBody();
                        if (StrUtil.isNotEmpty(payload)) {
                            Body bean = JSONUtil.toBean(payload, Body.class);
                            bean.setCurrentTime(LocalDateTime.now().toInstant(ZoneOffset.ofHours(8)).toEpochMilli());
                            out.collect(bean);
                        }
                    }
                }).name("cloudBoxDataHistoryEvent flatmap");

        // Group beans by data type into 2-second tumbling event-time windows and
        // emit each non-empty window's contents as one batch for the MySQL sink.
        SingleOutputStreamOperator<List<Body>> batches = bodies
                .assignTimestampsAndWatermarks(WatermarkStrategy.<Body>forBoundedOutOfOrderness(Duration.ofSeconds(1))
                        .withTimestampAssigner(((body, recordTimestamp) -> body.getCurrentTime())))
                .keyBy(Body::getData_type)
                .window(TumblingEventTimeWindows.of(Time.seconds(2)))
                .process(new ProcessWindowFunction<Body, List<Body>, Integer, TimeWindow>() {
                    @Override
                    public void process(Integer key, ProcessWindowFunction<Body, List<Body>, Integer, TimeWindow>.Context context,
                                        Iterable<Body> elements, Collector<List<Body>> out) {
                        List<Body> batch = ListUtil.toList(elements);
                        if (!batch.isEmpty()) {
                            out.collect(batch);
                        }
                    }
                }).name("to mysql");

        batches.addSink(new SinkMysqlFunc()).name("sink to mysql");
        env.execute("cloud box event job");
    }
}
Loading…
Cancel
Save