// Mock data producer for cloud-box (云盒) device statistics — feeds randomized
// device events into Kafka for development/testing of the stats pipeline.
import cn.hutool.core.util.RandomUtil;
import cn.hutool.db.Db;
import com.qniao.iot.machine.event.MachineIotDataReceivedEvent;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.ByteArraySerializer;
import org.apache.kafka.common.serialization.StringSerializer;
import java.math.BigDecimal;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.Future;
public class SourceMockerDemo {

    /** Delay between two mock events, in milliseconds. */
    public static final long DELAY = 1000;

    /** Reporting timezone of the devices (UTC+8). */
    private static final ZoneOffset ZONE = ZoneOffset.ofHours(8);

    /**
     * Endlessly produces randomized {@code MachineIotDataReceivedEvent}s to the
     * "test" Kafka topic, one every {@link #DELAY} ms, to exercise the
     * cloud-box statistics pipeline.
     *
     * @param args unused
     * @throws Exception on Kafka send failure or interruption
     */
    public static void main(String[] args) throws Exception {
        Properties kafkaProps = createKafkaProperties();
        String topic = "test";
        // Power state pool (0 = power off, 1 = power on)
        List<Integer> pwrStaList = Arrays.asList(0, 1);
        // Working state pool (0 = stopped, 1 = working, 2 = standby)
        List<Integer> accStaList = Arrays.asList(0, 1, 2);
        // Monotonically increasing mock counters.
        long currJobDuration = 231L;
        // FIX: job count gets its own accumulator — the original reused
        // currJobDuration for both duration and count, bumping it twice
        // per iteration and reporting the same value in two fields.
        long currJobCount = 231L;
        long accJobCount = 2314234L;
        // try-with-resources so the producer is flushed/closed if the loop
        // ever exits (e.g. via an exception).
        try (KafkaProducer<String, byte[]> producer = new KafkaProducer<>(kafkaProps)) {
            while (true) {
                MachineIotDataReceivedEvent event = new MachineIotDataReceivedEvent();
                event.setId(RandomUtil.randomLong(999999999999999L));
                event.setMachineIotMac(861193040814171L);
                event.setMachinePwrStat(RandomUtil.randomEles(pwrStaList, 1).get(0));
                // A powered-off machine always reports "stopped"; the helper
                // already handles that, so no separate power check is needed
                // (the original set the working state twice — dead store).
                event.setMachineWorkingStat(generateWorkingSta(
                        RandomUtil.randomEles(accStaList, 1).get(0),
                        event.getMachinePwrStat()));
                // Duration grows by a random increment each event.
                currJobDuration += RandomUtil.randomLong(99L);
                event.setCurrJobDuration(currJobDuration);
                event.setCurrWaitingDuration(0L);
                event.setIgStat(0);
                // Compute "now" once so received/report timestamps agree.
                long nowMillis = LocalDateTime.now().toEpochSecond(ZONE) * 1000;
                event.setReceivedTime(nowMillis);
                event.setReportTime(nowMillis);
                // Count grows by its own random increment.
                currJobCount += RandomUtil.randomLong(99L);
                event.setCurrJobCount(currJobCount);
                // Accumulated count on top of its base value.
                accJobCount += RandomUtil.randomLong(99L);
                event.setAccJobCount(accJobCount);
                // Data source fixed to 1 (根云 / RootCloud).
                event.setDataSource(1);
                event.setCurrStoppingDuration(0L);
                ProducerRecord<String, byte[]> record =
                        new RootCloudIotDataEventSerialization(topic).serialize(event, null);
                Future<RecordMetadata> send = producer.send(record);
                // Block on the broker ack so failures surface immediately.
                System.out.println(send.get());
                Thread.sleep(DELAY);
            }
        }
    }

    /**
     * Returns the effective working state: a powered-off machine
     * ({@code pwrSta == 0}) is always reported as stopped (0); otherwise the
     * supplied random state is kept.
     *
     * @param workingSta candidate working state (0 stopped, 1 working, 2 standby)
     * @param pwrSta     power state (0 off, 1 on)
     * @return the working state to report
     */
    private static Integer generateWorkingSta(Integer workingSta, Integer pwrSta) {
        return pwrSta.equals(0) ? 0 : workingSta;
    }

    /**
     * Builds the Kafka producer configuration: String keys, byte[] values,
     * pointed at the production broker.
     *
     * @return producer {@link Properties}
     */
    private static Properties createKafkaProperties() {
        Properties kafkaProps = new Properties();
        // Production environment broker (正式环境)
        kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "120.25.199.30:19092");
        kafkaProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getCanonicalName());
        kafkaProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getCanonicalName());
        return kafkaProps;
    }
}