Showing 18 changed files with 1190 additions and 57 deletions
| 1 | driverClassName=com.mysql.cj.jdbc.Driver | 1 | driverClassName=com.mysql.cj.jdbc.Driver |
| 2 | -url=jdbc:mysql://rm-wz9740un21f09iokuao.mysql.rds.aliyuncs.com:3306/liu_yu_le?useUnicode=true&characterEncoding=utf8&autoReconnect=true | ||
| 3 | -username=luhui | ||
| 4 | -password=Luhui586 | 2 | +#url=jdbc:mysql://rm-wz9740un21f09iokuao.mysql.rds.aliyuncs.com:3306/liu_yu_le?useUnicode=true&characterEncoding=utf8&autoReconnect=true |
| 3 | +#username=luhui | ||
| 4 | +#password=Luhui586 | ||
| 5 | +url=jdbc:mysql://192.168.31.133:3306/ly_sensor_data_2020?useUnicode=true&characterEncoding=utf8&autoReconnect=true | ||
| 6 | +username = luhui | ||
| 7 | +password = Luhui586 | ||
| 5 | #\u6700\u5927\u8FDE\u63A5\u6570\u91CF | 8 | #\u6700\u5927\u8FDE\u63A5\u6570\u91CF |
| 6 | maxActive=100 | 9 | maxActive=100 |
| 7 | #\u6700\u5927\u7A7A\u95F2\u8FDE\u63A5 | 10 | #\u6700\u5927\u7A7A\u95F2\u8FDE\u63A5 |
| @@ -57,5 +57,84 @@ | @@ -57,5 +57,84 @@ | ||
| 57 | <groupId>commons-pool</groupId> | 57 | <groupId>commons-pool</groupId> |
| 58 | <artifactId>commons-pool</artifactId> | 58 | <artifactId>commons-pool</artifactId> |
| 59 | </dependency> | 59 | </dependency> |
| 60 | + | ||
| 61 | + <dependency> | ||
| 62 | + <groupId>com.zendesk</groupId> | ||
| 63 | + <artifactId>mysql-binlog-connector-java</artifactId> | ||
| 64 | + </dependency> | ||
| 65 | + <dependency> | ||
| 66 | + <groupId>com.influxdb</groupId> | ||
| 67 | + <artifactId>influxdb-client-java</artifactId> | ||
| 68 | + <exclusions> | ||
| 69 | + <exclusion> | ||
| 70 | + <groupId>com.squareup.okhttp3</groupId> | ||
| 71 | + <artifactId>okhttp</artifactId> | ||
| 72 | + </exclusion> | ||
| 73 | + <exclusion> | ||
| 74 | + <groupId>com.squareup.okio</groupId> | ||
| 75 | + <artifactId>okio-jvm</artifactId> | ||
| 76 | + </exclusion> | ||
| 77 | + <exclusion> | ||
| 78 | + <groupId>com.squareup.okio</groupId> | ||
| 79 | + <artifactId>okio</artifactId> | ||
| 80 | + </exclusion> | ||
| 81 | + <exclusion> | ||
| 82 | + <groupId>com.squareup.okio</groupId> | ||
| 83 | + <artifactId>okio-parent</artifactId> | ||
| 84 | + </exclusion> | ||
| 85 | + </exclusions> | ||
| 86 | + </dependency> | ||
| 60 | </dependencies> | 87 | </dependencies> |
| 88 | + | ||
| 89 | + <build> | ||
| 90 | + <finalName>lh-data-file-service</finalName> | ||
| 91 | + <plugins> | ||
| 92 | + <plugin> | ||
| 93 | + <groupId>org.apache.maven.plugins</groupId> | ||
| 94 | + <artifactId>maven-jar-plugin</artifactId> | ||
| 95 | + <version>2.4</version> | ||
| 96 | + <configuration> | ||
| 97 | + <archive> | ||
| 98 | + <!-- | ||
| 99 | + 生成的jar中,不要包含pom.xml和pom.properties这两个文件 | ||
| 100 | + --> | ||
| 101 | + <addMavenDescriptor>false</addMavenDescriptor> | ||
| 102 | + <manifest> | ||
| 103 | + <!-- | ||
| 104 | + 是否要把第三方jar放到manifest的classpath中 | ||
| 105 | + --> | ||
| 106 | + <addClasspath>true</addClasspath> | ||
| 107 | + | ||
| 108 | + <!-- | ||
| 109 | + 生成的manifest中classpath的前缀,因为要把第三方jar放到lib目录下,所以classpath的前缀是lib/ | ||
| 110 | + --> | ||
| 111 | + <classpathPrefix>lib/</classpathPrefix> | ||
| 112 | + <mainClass>com.zhonglai.luhui.data.file.service.service.DataService</mainClass> | ||
| 113 | + </manifest> | ||
| 114 | + </archive> | ||
| 115 | + </configuration> | ||
| 116 | + </plugin> | ||
| 117 | + | ||
| 118 | + <!-- The configuration of maven-assembly-plugin --> | ||
| 119 | + <plugin> | ||
| 120 | + <groupId>org.apache.maven.plugins</groupId> | ||
| 121 | + <artifactId>maven-assembly-plugin</artifactId> | ||
| 122 | + <version>2.4</version> | ||
| 123 | + <configuration> | ||
| 124 | + <descriptors> | ||
| 125 | + <descriptor>${project.parent.parent.basedir}/configs/package.xml</descriptor> | ||
| 126 | + </descriptors> | ||
| 127 | + </configuration> | ||
| 128 | + <executions> | ||
| 129 | + <execution> | ||
| 130 | + <id>make-assembly</id> | ||
| 131 | + <phase>package</phase> | ||
| 132 | + <goals> | ||
| 133 | + <goal>single</goal> | ||
| 134 | + </goals> | ||
| 135 | + </execution> | ||
| 136 | + </executions> | ||
| 137 | + </plugin> | ||
| 138 | + </plugins> | ||
| 139 | + </build> | ||
| 61 | </project> | 140 | </project> |
| @@ -2,6 +2,7 @@ package com.zhonglai.luhui.data.file.service.service; | @@ -2,6 +2,7 @@ package com.zhonglai.luhui.data.file.service.service; | ||
| 2 | 2 | ||
| 3 | import cn.hutool.core.io.IORuntimeException; | 3 | import cn.hutool.core.io.IORuntimeException; |
| 4 | import cn.hutool.core.io.IoUtil; | 4 | import cn.hutool.core.io.IoUtil; |
| 5 | +import cn.hutool.core.io.file.FileReader; | ||
| 5 | import cn.hutool.core.io.file.FileWriter; | 6 | import cn.hutool.core.io.file.FileWriter; |
| 6 | import cn.hutool.core.util.CharsetUtil; | 7 | import cn.hutool.core.util.CharsetUtil; |
| 7 | import com.ruoyi.common.utils.DateUtils; | 8 | import com.ruoyi.common.utils.DateUtils; |
| @@ -14,6 +15,7 @@ import org.apache.commons.dbutils.BasicRowProcessor; | @@ -14,6 +15,7 @@ import org.apache.commons.dbutils.BasicRowProcessor; | ||
| 14 | import org.apache.commons.dbutils.GenerousBeanProcessor; | 15 | import org.apache.commons.dbutils.GenerousBeanProcessor; |
| 15 | import org.apache.commons.dbutils.handlers.BeanListHandler; | 16 | import org.apache.commons.dbutils.handlers.BeanListHandler; |
| 16 | import org.apache.commons.lang3.time.DateFormatUtils; | 17 | import org.apache.commons.lang3.time.DateFormatUtils; |
| 18 | +import org.springframework.util.FileCopyUtils; | ||
| 17 | 19 | ||
| 18 | import java.io.BufferedReader; | 20 | import java.io.BufferedReader; |
| 19 | import java.io.BufferedWriter; | 21 | import java.io.BufferedWriter; |
| @@ -25,6 +27,7 @@ import java.util.List; | @@ -25,6 +27,7 @@ import java.util.List; | ||
| 25 | import java.util.Map; | 27 | import java.util.Map; |
| 26 | import java.util.concurrent.TimeUnit; | 28 | import java.util.concurrent.TimeUnit; |
| 27 | 29 | ||
| 30 | + | ||
| 28 | /** | 31 | /** |
| 29 | * 数据服务 | 32 | * 数据服务 |
| 30 | */ | 33 | */ |
| @@ -40,56 +43,77 @@ public class DataService { | @@ -40,56 +43,77 @@ public class DataService { | ||
| 40 | String yea = day.substring(0,4); | 43 | String yea = day.substring(0,4); |
| 41 | 44 | ||
| 42 | String tableName = "`ly_sensor_data_"+yea+"`.`device_sensor_data_"+day+"`"; | 45 | String tableName = "`ly_sensor_data_"+yea+"`.`device_sensor_data_"+day+"`"; |
| 43 | - List<DeviceSensorData> ct = baseDao.findBysql(DeviceSensorData.class,"SELECT device_info_id,data_type FROM "+tableName+" GROUP BY device_info_id,data_type"); | ||
| 44 | - if(null != ct && ct.size() != 0) | 46 | + |
| 47 | + List<Map<String, Object>> list = baseDao.findBysql("SELECT COUNT(*) ct FROM "+tableName); | ||
| 48 | + if(null != list && list.size() !=0 ) | ||
| 45 | { | 49 | { |
| 46 | - for (DeviceSensorData deviceSensorData:ct) | 50 | + long ct = (long) list.get(0).get("ct"); |
| 51 | + | ||
| 52 | + long wancheng = 1; | ||
| 53 | + | ||
| 54 | + long jd = 0; | ||
| 55 | + | ||
| 56 | + int pagesize = 100000; | ||
| 57 | + int pageNo = 1; | ||
| 58 | + System.out.println(tableName); | ||
| 59 | + List<DeviceSensorData> deviceSensorDataList = getDeviceSensorDataList(tableName,pagesize,pageNo++); | ||
| 60 | + while (null != deviceSensorDataList && deviceSensorDataList.size() != 0 ) | ||
| 47 | { | 61 | { |
| 48 | - ScheduledUtil.scheduler.schedule(() -> { | ||
| 49 | - String imei = deviceSensorData.getDevice_info_id().split("_")[0]; | ||
| 50 | - String deviceInfoId = deviceSensorData.getDevice_info_id(); | ||
| 51 | - String dataType = deviceSensorData.getData_type(); | ||
| 52 | - String deviceType = FileUtil.deviceTypeMap.get(deviceSensorData.getDevice_info_id().split("_")[0]); | ||
| 53 | - | ||
| 54 | - List<DeviceSensorData> device_data_type_list = baseDao.findBysql(DeviceSensorData.class,"SELECT `creat_time`,`data_value` FROM "+tableName+" where device_info_id='"+deviceSensorData.getDevice_info_id()+"' and data_type='"+deviceSensorData.getData_type()+"'"); | ||
| 55 | - Statistics statistics = map.get(Integer.parseInt(yea)); | ||
| 56 | - if(null != device_data_type_list && device_data_type_list.size() != 0 ) | 62 | + for (DeviceSensorData deviceSensorData:deviceSensorDataList) |
| 63 | + { | ||
| 64 | + String baiduPath = FileUtil.createBaiduWangPanPat(yea,null==deviceSensorData.getDevice_model()?"device_model":deviceSensorData.getDevice_model(),deviceSensorData.getDevice_info_id().split("_")[0],deviceSensorData.getDevice_info_id(),deviceSensorData.getData_type()); | ||
| 65 | + File file = new File(FileUtil.tempFilePath+baiduPath); | ||
| 66 | + saveDataFile(deviceSensorData,file); | ||
| 67 | + | ||
| 68 | + long dqjd = (wancheng++*100)/ct; | ||
| 69 | + if(dqjd>jd) | ||
| 57 | { | 70 | { |
| 58 | - saveDataFile(device_data_type_list,imei,deviceInfoId,dataType,deviceType,yea,day); | ||
| 59 | - map.put(Integer.parseInt(yea),statistics.add()); | 71 | + jd = dqjd; |
| 60 | } | 72 | } |
| 61 | 73 | ||
| 62 | - System.out.println(yea+"年完成进度:"+statistics.getProgress()); | ||
| 63 | - },0, TimeUnit.SECONDS); | ||
| 64 | - | 74 | + System.out.print("\r"+"完成进度:"+jd+"%"); |
| 75 | + for (int i=0;i<jd;i++) | ||
| 76 | + { | ||
| 77 | + System.out.print("#"); | ||
| 78 | + } | ||
| 79 | + System.out.print("#"); | ||
| 80 | + } | ||
| 81 | + deviceSensorDataList = getDeviceSensorDataList(tableName,pagesize,pageNo++); | ||
| 65 | } | 82 | } |
| 83 | + System.out.println("完成"); | ||
| 84 | + | ||
| 66 | } | 85 | } |
| 67 | 86 | ||
| 68 | } | 87 | } |
| 69 | 88 | ||
| 70 | - private void saveDataFile(List<DeviceSensorData> device_data_type_list, String imei,String deviceInfoId,String dataType, String deviceType,String yea,String day) | 89 | + private List<DeviceSensorData> getDeviceSensorDataList(String tableName,int pagesize,int pageNo) |
| 90 | + { | ||
| 91 | + List<DeviceSensorData> device_data_list = baseDao.findBysql(DeviceSensorData.class,"SELECT * FROM "+tableName+" limit "+((pageNo-1)*pagesize)+","+pagesize); | ||
| 92 | + return device_data_list; | ||
| 93 | + } | ||
| 94 | + | ||
| 95 | + | ||
| 96 | + | ||
| 97 | + private void saveDataFile(DeviceSensorData deviceSensorData,File file) | ||
| 71 | { | 98 | { |
| 72 | - String baiduPath = FileUtil.createBaiduWangPanPat(yea,null==deviceType?"device_model":deviceType,imei,deviceInfoId,dataType+"+"+day); | ||
| 73 | - File file = new File(FileUtil.tempFilePath+baiduPath); | ||
| 74 | 99 | ||
| 75 | BufferedWriter bufferedWriter = null; | 100 | BufferedWriter bufferedWriter = null; |
| 76 | 101 | ||
| 77 | try { | 102 | try { |
| 78 | bufferedWriter = FileWriter.create(file, CharsetUtil.CHARSET_UTF_8).getWriter(true); | 103 | bufferedWriter = FileWriter.create(file, CharsetUtil.CHARSET_UTF_8).getWriter(true); |
| 104 | + | ||
| 79 | if (null != bufferedWriter) | 105 | if (null != bufferedWriter) |
| 80 | { | 106 | { |
| 81 | - for (DeviceSensorData deviceSensorData:device_data_type_list) | ||
| 82 | - { | ||
| 83 | - StringBuffer line = new StringBuffer(); | ||
| 84 | - | ||
| 85 | - line.append(deviceSensorData.getCreat_time()); | ||
| 86 | - line.append(","); | ||
| 87 | - line.append(deviceSensorData.getData_value()); | ||
| 88 | - bufferedWriter.write(line.toString()); | ||
| 89 | - //默认换行符 | ||
| 90 | - bufferedWriter.write(FileUtil.CRLF); | ||
| 91 | - bufferedWriter.flush(); | ||
| 92 | - } | 107 | + StringBuffer line = new StringBuffer(); |
| 108 | + | ||
| 109 | + line.append(deviceSensorData.getCreat_time()); | ||
| 110 | + line.append(","); | ||
| 111 | + line.append(deviceSensorData.getData_value()); | ||
| 112 | + bufferedWriter.write(line.toString()); | ||
| 113 | + //默认换行符 | ||
| 114 | + bufferedWriter.write(FileUtil.CRLF); | ||
| 115 | + bufferedWriter.flush(); | ||
| 116 | + | ||
| 93 | } | 117 | } |
| 94 | 118 | ||
| 95 | } catch (IOException e) { | 119 | } catch (IOException e) { |
| @@ -99,33 +123,67 @@ public class DataService { | @@ -99,33 +123,67 @@ public class DataService { | ||
| 99 | } | 123 | } |
| 100 | } | 124 | } |
| 101 | 125 | ||
| 102 | - private static Map<Integer, Statistics> map = new HashMap<>(); | ||
| 103 | - | ||
| 104 | - public static void main(String[] args) { | ||
| 105 | - FileUtil.initDeviceType(); | ||
| 106 | - DataService dataService = new DataService(); | 126 | + public void transformDataByDay(String dataday,int endyear) |
| 127 | + { | ||
| 107 | Calendar calendar = Calendar.getInstance(); | 128 | Calendar calendar = Calendar.getInstance(); |
| 108 | - int year = 2020; | ||
| 109 | - while (year<2024) | 129 | + |
| 130 | + int startyear = Integer.parseInt(dataday.substring(0,4)); | ||
| 131 | + int month = Calendar.JANUARY; | ||
| 132 | + if(dataday.length()>=6) | ||
| 133 | + { | ||
| 134 | + month = Integer.parseInt(dataday.substring(4,6))-1; | ||
| 135 | + } | ||
| 136 | + int date = 1; | ||
| 137 | + if(dataday.length()>=8) | ||
| 138 | + { | ||
| 139 | + date = Integer.parseInt(dataday.substring(6,8)); | ||
| 140 | + } | ||
| 141 | + while (startyear<endyear) | ||
| 110 | { | 142 | { |
| 111 | - calendar.set(year,Calendar.JANUARY,1); | ||
| 112 | - if (!map.containsKey(year)) | 143 | + calendar.set(startyear,month,date); |
| 144 | + if (!map.containsKey(startyear)) | ||
| 113 | { | 145 | { |
| 114 | Statistics statistics = new Statistics(); | 146 | Statistics statistics = new Statistics(); |
| 115 | statistics.setTotal(calendar.getActualMaximum(Calendar.DAY_OF_YEAR)); | 147 | statistics.setTotal(calendar.getActualMaximum(Calendar.DAY_OF_YEAR)); |
| 116 | statistics.setFinished(0); | 148 | statistics.setFinished(0); |
| 117 | - map.put(year,statistics); | 149 | + map.put(startyear,statistics); |
| 118 | } | 150 | } |
| 119 | - while (calendar.get(Calendar.YEAR)==year) | 151 | + while (calendar.get(Calendar.YEAR)==startyear) |
| 120 | { | 152 | { |
| 121 | String day = DateUtils.parseDateToStr("yyyyMMdd",calendar.getTime()); | 153 | String day = DateUtils.parseDateToStr("yyyyMMdd",calendar.getTime()); |
| 122 | - dataService.saveOneDayData(day); | ||
| 123 | -// ScheduledUtil.scheduler.schedule(() ->,0,TimeUnit.SECONDS); | 154 | + saveOneDayData(day); |
| 124 | calendar.add(Calendar.DAY_OF_MONTH, 1); | 155 | calendar.add(Calendar.DAY_OF_MONTH, 1); |
| 125 | } | 156 | } |
| 126 | - year++; | 157 | + startyear++; |
| 158 | + date = 1; | ||
| 159 | + month = Calendar.JANUARY; | ||
| 127 | } | 160 | } |
| 128 | 161 | ||
| 162 | + } | ||
| 129 | 163 | ||
| 164 | + public void saveerr() | ||
| 165 | + { | ||
| 166 | + File file = new File(FileUtil.tempFilePath+"/errtable.txt"); | ||
| 167 | + File copyfile = new File(FileUtil.tempFilePath+"/errtable1.txt"); | ||
| 168 | + try { | ||
| 169 | + FileCopyUtils.copy(file,copyfile); | ||
| 170 | + } catch (IOException e) { | ||
| 171 | + throw new RuntimeException(e); | ||
| 172 | + } | ||
| 173 | + file.delete(); | ||
| 174 | + | ||
| 175 | + List<String> list= FileReader.create(copyfile).readLines(); | ||
| 176 | + for (String tablename:list) | ||
| 177 | + { | ||
| 178 | + String day = tablename.substring(tablename.lastIndexOf("_")+1,tablename.lastIndexOf("`")); | ||
| 179 | + saveOneDayData(day); | ||
| 180 | + } | ||
| 181 | + } | ||
| 182 | + | ||
| 183 | + private static Map<Integer, Statistics> map = new HashMap<>(); | ||
| 184 | + | ||
| 185 | + public static void main(String[] args) { | ||
| 186 | + DataService dataService = new DataService(); | ||
| 187 | + dataService.transformDataByDay("20230117",2024); | ||
| 130 | } | 188 | } |
| 131 | } | 189 | } |
| 1 | +package com.zhonglai.luhui.data.file.service.service; | ||
| 2 | + | ||
| 3 | +import java.text.ParseException; | ||
| 4 | +import java.text.SimpleDateFormat; | ||
| 5 | +import java.time.Instant; | ||
| 6 | +import java.time.LocalDateTime; | ||
| 7 | +import java.time.OffsetDateTime; | ||
| 8 | +import java.time.ZoneOffset; | ||
| 9 | +import java.time.format.DateTimeFormatter; | ||
| 10 | +import java.util.*; | ||
| 11 | +import java.util.regex.Matcher; | ||
| 12 | +import java.util.regex.Pattern; | ||
| 13 | + | ||
| 14 | +import com.influxdb.client.InfluxDBClient; | ||
| 15 | +import com.influxdb.client.InfluxDBClientFactory; | ||
| 16 | +import com.influxdb.client.QueryApi; | ||
| 17 | +import com.influxdb.client.WriteApiBlocking; | ||
| 18 | +import com.influxdb.client.domain.Bucket; | ||
| 19 | +import com.influxdb.client.domain.DeletePredicateRequest; | ||
| 20 | +import com.influxdb.client.domain.Query; | ||
| 21 | +import com.influxdb.client.domain.WritePrecision; | ||
| 22 | +import com.influxdb.client.write.Point; | ||
| 23 | +import com.influxdb.query.FluxRecord; | ||
| 24 | +import com.influxdb.query.FluxTable; | ||
| 25 | +import com.ruoyi.common.utils.DateUtils; | ||
| 26 | +import com.ruoyi.common.utils.GsonConstructor; | ||
| 27 | +import com.zhonglai.dao.BaseDao; | ||
| 28 | +import com.zhonglai.luhui.data.file.service.dto.DeviceSensorData; | ||
| 29 | +import com.zhonglai.luhui.data.file.service.util.InfluxDBFluxExpression; | ||
| 30 | +import org.slf4j.Logger; | ||
| 31 | +import org.slf4j.LoggerFactory; | ||
| 32 | + | ||
| 33 | +public class InfluxDB2Service { | ||
| 34 | + | ||
| 35 | + private final Logger logger = LoggerFactory.getLogger(this.getClass()); | ||
| 36 | + | ||
| 37 | + private static BaseDao baseDao = new BaseDao(new DataDBFactoryImp()); | ||
| 38 | + | ||
| 39 | + private static final String token = "YjJgRuCDnypQV4pHlzoixvdoiv237ybVvZ8zzOBfLdbXPbzmYYRi2uWGzXONqqLllhVq3wm03lOF2pl0e3uQHQ=="; | ||
| 40 | + private static final String orgID = "dfed6796541746a2"; | ||
| 41 | + private static final String org = "luhui"; | ||
| 42 | + private static final String url = "http://192.168.31.133:8086"; | ||
| 43 | + | ||
| 44 | + private Map<String,String> bucketMap= new HashMap<>(); | ||
| 45 | + | ||
| 46 | + { | ||
| 47 | + InfluxDBClient influxDBClient = connect(); | ||
| 48 | + List<Bucket> list = influxDBClient.getBucketsApi().findBucketsByOrgName(org); | ||
| 49 | + close(influxDBClient); | ||
| 50 | + if(null != list && list.size() != 0) | ||
| 51 | + { | ||
| 52 | + for (Bucket bucket:list) | ||
| 53 | + { | ||
| 54 | + bucketMap.put(bucket.getName(),bucket.getId()); | ||
| 55 | + } | ||
| 56 | + } | ||
| 57 | + } | ||
| 58 | + | ||
| 59 | + /** | ||
| 60 | + * 建立连接 | ||
| 61 | + * @return | ||
| 62 | + */ | ||
| 63 | + private InfluxDBClient connect() | ||
| 64 | + { | ||
| 65 | + return connect(null); | ||
| 66 | + } | ||
| 67 | + | ||
| 68 | + /** | ||
| 69 | + * 建立连接 | ||
| 70 | + * @return | ||
| 71 | + */ | ||
| 72 | + private InfluxDBClient connect(Integer writeTimeOut ) | ||
| 73 | + { | ||
| 74 | + String connectUrl = url; | ||
| 75 | + if(null != writeTimeOut) | ||
| 76 | + { | ||
| 77 | + connectUrl = url + "?writeTimeout="+writeTimeOut; | ||
| 78 | + } | ||
| 79 | + InfluxDBClient client = InfluxDBClientFactory.create(connectUrl, token.toCharArray()); | ||
| 80 | + | ||
| 81 | + return client; | ||
| 82 | + } | ||
| 83 | + | ||
| 84 | + /** | ||
| 85 | + * 关闭连接 | ||
| 86 | + * @param client | ||
| 87 | + */ | ||
| 88 | + private void close(InfluxDBClient client) | ||
| 89 | + { | ||
| 90 | + if(null != client) | ||
| 91 | + { | ||
| 92 | + client.close(); | ||
| 93 | + } | ||
| 94 | + | ||
| 95 | + } | ||
| 96 | + | ||
| 97 | + /** | ||
| 98 | + * 检测存储桶是否存在 | ||
| 99 | + * @param bucketName | ||
| 100 | + * @return | ||
| 101 | + */ | ||
| 102 | + private boolean checkAndCreateBucket( String bucketName) { | ||
| 103 | + if (null != bucketMap.get(bucketName) && bucketMap.containsKey(bucketName)) | ||
| 104 | + { | ||
| 105 | + return true; | ||
| 106 | + } | ||
| 107 | + InfluxDBClient client = connect(); | ||
| 108 | + Bucket buckets = client.getBucketsApi().findBucketByName(bucketName); | ||
| 109 | + if (null == buckets) { | ||
| 110 | + // 没有设置任何保留策略,所以这个bucket的数据将被永久保留 | ||
| 111 | + buckets = client.getBucketsApi().createBucket(bucketName,orgID); | ||
| 112 | + | ||
| 113 | + logger.info("Bucket 创建成功: " + buckets.getName()); | ||
| 114 | + } | ||
| 115 | + close(client); | ||
| 116 | + bucketMap.put(bucketName,buckets.getId()); | ||
| 117 | + return true; | ||
| 118 | + } | ||
| 119 | + | ||
| 120 | + private Point deviceSensorDataToPoint( DeviceSensorData deviceSensorData) | ||
| 121 | + { | ||
| 122 | + String[] separationstr = separationDeviceInfoId(deviceSensorData.getDevice_info_id()); | ||
| 123 | + | ||
| 124 | + Point point = Point.measurement(separationstr[0]) | ||
| 125 | + .addTag("sn",separationstr[1]) | ||
| 126 | + .addTag("type",deviceSensorData.getData_type()) | ||
| 127 | + .time(deviceSensorData.getCreat_time(),WritePrecision.S) | ||
| 128 | + ; | ||
| 129 | + if(isInteger(deviceSensorData.getData_value())) | ||
| 130 | + { | ||
| 131 | + point.addField("value",Double.parseDouble(deviceSensorData.getData_value())); | ||
| 132 | + }else if(isDecimal(deviceSensorData.getData_value())) | ||
| 133 | + { | ||
| 134 | + point.addField("value",Double.parseDouble(deviceSensorData.getData_value())); | ||
| 135 | + } | ||
| 136 | + else { | ||
| 137 | + point.addField("value",deviceSensorData.getData_value()); | ||
| 138 | + } | ||
| 139 | + | ||
| 140 | + return point; | ||
| 141 | + } | ||
| 142 | + | ||
| 143 | + private String[] separationDeviceInfoId(String deviceInfoId) | ||
| 144 | + { | ||
| 145 | + int i = deviceInfoId.indexOf("_"); | ||
| 146 | + | ||
| 147 | + String measurementName = ""; | ||
| 148 | + String baseTag = ""; | ||
| 149 | + if(i>0) | ||
| 150 | + { | ||
| 151 | + measurementName = deviceInfoId.substring(0,i); | ||
| 152 | + baseTag = deviceInfoId.substring(i+1,deviceInfoId.length()); | ||
| 153 | + }else { | ||
| 154 | + measurementName = deviceInfoId; | ||
| 155 | + } | ||
| 156 | + | ||
| 157 | + return new String[]{measurementName,baseTag}; | ||
| 158 | + } | ||
| 159 | + | ||
| 160 | + public static boolean isInteger(String input) { | ||
| 161 | + Pattern pattern = Pattern.compile("^[-+]?\\d+$"); | ||
| 162 | + Matcher matcher = pattern.matcher(input); | ||
| 163 | + return matcher.matches(); | ||
| 164 | + } | ||
| 165 | + | ||
| 166 | + public static boolean isDecimal(String input) { | ||
| 167 | + Pattern pattern = Pattern.compile("^[-+]?\\d+\\.\\d+$"); | ||
| 168 | + Matcher matcher = pattern.matcher(input); | ||
| 169 | + return matcher.matches(); | ||
| 170 | + } | ||
| 171 | + | ||
| 172 | + public void writeData( String bucket,List<DeviceSensorData> dataList) | ||
| 173 | + { | ||
| 174 | + | ||
| 175 | + if(null == dataList || dataList.size()==0) | ||
| 176 | + { | ||
| 177 | + return ; | ||
| 178 | + } | ||
| 179 | + InfluxDBClient influxDBClient = connect(60000); | ||
| 180 | + WriteApiBlocking writeApi = influxDBClient.getWriteApiBlocking(); | ||
| 181 | + | ||
| 182 | + List<Point> saveList = new ArrayList<>(); | ||
| 183 | + for (DeviceSensorData deviceSensorData:dataList) | ||
| 184 | + { | ||
| 185 | + Point data = deviceSensorDataToPoint(deviceSensorData); | ||
| 186 | + saveList.add(data); | ||
| 187 | + } | ||
| 188 | + writeApi.writePoints(bucket, orgID, saveList); | ||
| 189 | + close(influxDBClient); | ||
| 190 | + } | ||
| 191 | + | ||
| 192 | + private void mysqlToInfluxDB(String databaseName, String tableName) | ||
| 193 | + { | ||
| 194 | + logger.info("开始时间:"+ LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); | ||
| 195 | + | ||
| 196 | + long time = System.currentTimeMillis(); | ||
| 197 | + List<Map<String, Object>> device_modellist = baseDao.findBysql("SELECT DISTINCT device_model FROM "+databaseName+"."+tableName); | ||
| 198 | + logger.info("查询设备类型用时:"+(System.currentTimeMillis()-time)/1000+"s"); | ||
| 199 | + | ||
| 200 | + if(null != device_modellist && device_modellist.size() != 0) | ||
| 201 | + { | ||
| 202 | + for (Map<String, Object> map:device_modellist) | ||
| 203 | + { | ||
| 204 | + String device_model = map.get("device_model")+""; | ||
| 205 | + checkAndCreateBucket(device_model); | ||
| 206 | + | ||
| 207 | + String sql = "select * from "+databaseName+"."+tableName +" where device_model='"+device_model+"'"; | ||
| 208 | + | ||
| 209 | + int pageNo = 1; | ||
| 210 | + int pageSize = 10000; | ||
| 211 | + time = System.currentTimeMillis(); | ||
| 212 | + List<DeviceSensorData> list = baseDao.findBysql(DeviceSensorData.class,sql+getlimit(pageNo++,pageSize)); | ||
| 213 | + writeData(device_model,list); | ||
| 214 | + logger.info("处理第"+(pageNo-1)+"页时间用时:"+(System.currentTimeMillis()-time)/1000+"s"); | ||
| 215 | + while (null != list && list.size()>0) | ||
| 216 | + { | ||
| 217 | + time = System.currentTimeMillis(); | ||
| 218 | + list = baseDao.findBysql(DeviceSensorData.class,sql+getlimit(pageNo++,pageSize)); | ||
| 219 | + writeData(device_model,list); | ||
| 220 | + logger.info("处理第"+(pageNo-1)+"页时间用时:"+(System.currentTimeMillis()-time)/1000+"s"); | ||
| 221 | + } | ||
| 222 | + } | ||
| 223 | + } | ||
| 224 | + logger.info("结束时间:"+ LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); | ||
| 225 | + } | ||
| 226 | + | ||
| 227 | + public void synchronousMysqlToInfluxDBByTime(String time) | ||
| 228 | + { | ||
| 229 | + | ||
| 230 | + Calendar calendar = Calendar.getInstance(); | ||
| 231 | + SimpleDateFormat simpleDateFormat = new SimpleDateFormat( "yyyyMMdd"); | ||
| 232 | + | ||
| 233 | + try { | ||
| 234 | + calendar.setTime(simpleDateFormat.parse(time)); | ||
| 235 | + } catch (ParseException e) { | ||
| 236 | + throw new RuntimeException(e); | ||
| 237 | + } | ||
| 238 | + | ||
| 239 | + while (System.currentTimeMillis()-calendar.getTime().getTime()>=86400000) //一天前 | ||
| 240 | + { | ||
| 241 | + Integer yea = calendar.get(Calendar.YEAR); | ||
| 242 | + String day = simpleDateFormat.format(calendar.getTime()); | ||
| 243 | + | ||
| 244 | + String databaseName = "`ly_sensor_data_"+yea+"`"; | ||
| 245 | + String tableName = "`device_sensor_data_"+day+"`"; | ||
| 246 | + | ||
| 247 | + logger.info(databaseName+"."+tableName); | ||
| 248 | + mysqlToInfluxDB(databaseName,tableName); | ||
| 249 | + | ||
| 250 | + calendar.add(Calendar.DATE,1); | ||
| 251 | + } | ||
| 252 | + } | ||
| 253 | + | ||
| 254 | + private String getlimit(int pageNo,int pageSize) | ||
| 255 | + { | ||
| 256 | + String limint = " limit " + (pageNo - 1) * pageSize + "," + pageSize; | ||
| 257 | + logger.info(limint); | ||
| 258 | + return limint; | ||
| 259 | + } | ||
| 260 | + | ||
| 261 | + public String getSenserData(String deviceInfoId,String dataType,Integer startTime,Integer endTime) | ||
| 262 | + { | ||
| 263 | + InfluxDBClient influxDBClient = connect(60000); | ||
| 264 | + QueryApi queryApi = influxDBClient.getQueryApi(); | ||
| 265 | + | ||
| 266 | + String[] separationstr = separationDeviceInfoId(deviceInfoId); | ||
| 267 | + | ||
| 268 | + List<FluxTable> list = queryApi.query("select * from `"+separationstr[0]+"` where sn='"+separationstr[1]+"' and type='"+dataType+"' and time>="+startTime*1000l*1000l*1000l+" and time<"+endTime*1000l*1000l*1000l,org); | ||
| 269 | + | ||
| 270 | + close(influxDBClient); | ||
| 271 | + return ""; | ||
| 272 | + } | ||
| 273 | + | ||
| 274 | + /** | ||
| 275 | + * 查询所有数据库 | ||
| 276 | + * @return | ||
| 277 | + */ | ||
| 278 | + public List<Bucket> findBuckets() | ||
| 279 | + { | ||
| 280 | + InfluxDBClient influxDBClient = connect(); | ||
| 281 | + List<Bucket> buckets = influxDBClient.getBucketsApi().findBuckets(); | ||
| 282 | + close(influxDBClient); | ||
| 283 | + return buckets; | ||
| 284 | + } | ||
| 285 | + | ||
| 286 | + /** | ||
| 287 | + * 查询数据库下所有表 | ||
| 288 | + * @param bucket | ||
| 289 | + * @return | ||
| 290 | + */ | ||
| 291 | + public List<FluxTable> findMeasurements(String bucket) | ||
| 292 | + { | ||
| 293 | + InfluxDBClient influxDBClient = connect(); | ||
| 294 | + List<FluxTable> tables = influxDBClient.getQueryApi().query("import \"influxdata/influxdb/v1\"\n" | ||
| 295 | + + "v1.measurements(bucket:\""+bucket+"\")", org); | ||
| 296 | + close(influxDBClient); | ||
| 297 | + return tables; | ||
| 298 | + } | ||
| 299 | + | ||
| 300 | + public void queryMeasurementDataWithPaging(String bucket,String measurement, int limit, int offset) { | ||
| 301 | + InfluxDBClient influxDBClient = connect(); | ||
| 302 | + QueryApi queryApi = influxDBClient.getQueryApi(); | ||
| 303 | + String flux = String.format("from(bucket:\""+bucket+"\") |> range(start: -5y) |> filter(fn: (r) => r._measurement == \"%s\") |> last() ", measurement, limit, offset); | ||
| 304 | + List<FluxTable> tables = queryApi.query(flux, org); | ||
| 305 | + | ||
| 306 | + for (FluxTable table : tables) { | ||
| 307 | + List<FluxRecord> records = table.getRecords(); | ||
| 308 | + for (FluxRecord record : records) { | ||
| 309 | + System.out.println(record.getTime() + ": " + record.getValue()); | ||
| 310 | + } | ||
| 311 | + } | ||
| 312 | + close(influxDBClient); | ||
| 313 | + } | ||
| 314 | + | ||
| 315 | + /** | ||
| 316 | + * 带时区信息的UTC格式 | ||
| 317 | + */ | ||
| 318 | + public static final String UTC_ZONE_FORMATER = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"; | ||
| 319 | + | ||
| 320 | + | ||
| 321 | + /** | ||
| 322 | + *查询数据 | ||
| 323 | + */ | ||
| 324 | + public void select(String bucketName, String tableName,Integer start,Integer stop){ | ||
| 325 | + InfluxDBClient influxDBClient = connect(); | ||
| 326 | + StringBuffer stringBuilder = new StringBuffer(); | ||
| 327 | + InfluxDBFluxExpression.appendCommonFlux(stringBuilder, bucketName, tableName, Instant.ofEpochMilli(start*1000l).toString(), Instant.ofEpochMilli(stop*1000l).toString()); | ||
| 328 | +// InfluxDBFluxExpression.appendTagFlux(stringBuilder, map.get("sn").toString()); | ||
| 329 | +// InfluxDBFluxExpression.appendTimeShiftFlux(stringBuilder); | ||
| 330 | + logger.info("查询sql :{}", stringBuilder.toString()); | ||
| 331 | + // 通过时间分组 查询时间段的数据 | ||
| 332 | + List<FluxTable> tables = influxDBClient.getQueryApi().query(stringBuilder.toString(),org); | ||
| 333 | + List<Map<String, Object>> list = new ArrayList<>(); | ||
| 334 | + for (FluxTable table : tables) { | ||
| 335 | + List<FluxRecord> records = table.getRecords(); | ||
| 336 | + for (FluxRecord record : records) { | ||
| 337 | + logger.info("{}---{}---{}---{}", record.getMeasurement(),record.getField(),record.getValue(),record.getTime()); | ||
| 338 | + } | ||
| 339 | + } | ||
| 340 | + close(influxDBClient); | ||
| 341 | + } | ||
| 342 | + | ||
| 343 | + | ||
| 344 | + public void queryData(String bucket,String tableName,Integer start, Integer stop) { | ||
| 345 | + InfluxDBClient influxDBClient = connect(); | ||
| 346 | + String predicate = "_measurement=\""+tableName+"\""; | ||
| 347 | + QueryApi queryApi = influxDBClient.getQueryApi(); | ||
| 348 | + String query = String.format("from(bucket:\"%s\") " + | ||
| 349 | + "|> range(start: %s, stop: %s) " + | ||
| 350 | + "|> filter(fn: (r) => %s)", bucket, OffsetDateTime.ofInstant(Instant.ofEpochMilli(start*1000l), ZoneOffset.UTC), OffsetDateTime.ofInstant(Instant.ofEpochMilli(stop*1000l), ZoneOffset.UTC), predicate); | ||
| 351 | + | ||
| 352 | + logger.info("查询:"+query); | ||
| 353 | + List<FluxTable> tables = queryApi.query(query, org); | ||
| 354 | + | ||
| 355 | + for (FluxTable table : tables) { | ||
| 356 | + List<FluxRecord> records = table.getRecords(); | ||
| 357 | + for (FluxRecord record : records) { | ||
| 358 | + System.out.println(record.getTime() + ": " + record.getValue()); | ||
| 359 | + } | ||
| 360 | + } | ||
| 361 | + close(influxDBClient); | ||
| 362 | + } | ||
| 363 | + | ||
| 364 | + /** | ||
| 365 | + * 删除数据 | ||
| 366 | + */ | ||
| 367 | + public void delete(String bucketName, String tableName,Integer start,Integer stop) { | ||
| 368 | + InfluxDBClient influxDBClient = connect(); | ||
| 369 | + String predicate = "_measurement=\""+tableName+"\""; | ||
| 370 | + influxDBClient.getDeleteApi().delete( OffsetDateTime.ofInstant(Instant.ofEpochMilli(start*1000l), ZoneOffset.UTC), | ||
| 371 | + OffsetDateTime.ofInstant(Instant.ofEpochMilli(stop*1000l), ZoneOffset.UTC), | ||
| 372 | + predicate,bucketName, org); | ||
| 373 | + close(influxDBClient); | ||
| 374 | + } | ||
| 375 | + public static void main(String[] args) { | ||
| 376 | + InfluxDB2Service influxDB2Service = new InfluxDB2Service(); | ||
| 377 | +// influxDB2Service.delete("6_W","865501049001200",1580918400,1581091200); | ||
| 378 | + influxDB2Service.select("6_W","865501049001200",1580745600,1580832000); | ||
| 379 | + | ||
| 380 | + } | ||
| 381 | + | ||
| 382 | + | ||
| 383 | +} |
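
A minimal usage sketch for the `InfluxDB2Service` added above, assuming the classes from this diff are on the classpath, the hard-coded `url`/`token`/`org` point at a reachable InfluxDB 2.x instance, and the JDBC source behind `BaseDao` is reachable; the bucket (`6_W`), measurement (`865501049001200`) and timestamps are illustrative values taken from the `main` method in the diff.

```java
package com.zhonglai.luhui.data.file.service.service;

// Sketch only: constructing InfluxDB2Service already opens a connection in its
// instance initializer to cache the existing buckets, so a live server is required.
public class InfluxDB2ServiceExample {
    public static void main(String[] args) {
        InfluxDB2Service service = new InfluxDB2Service();

        // Backfill day-partitioned MySQL tables into InfluxDB, starting at 2020-01-01.
        // Each device_model becomes a bucket; the prefix of device_info_id (before "_")
        // becomes the measurement and the suffix becomes the "sn" tag.
        service.synchronousMysqlToInfluxDBByTime("20200101");

        // Read one measurement back for a time window and print each record.
        service.queryData("6_W", "865501049001200", 1580745600, 1580832000);
    }
}
```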
| 1 | +package com.zhonglai.luhui.data.file.service.service; | ||
| 2 | + | ||
| 3 | +import com.alibaba.fastjson.JSON; | ||
| 4 | +import com.alibaba.fastjson.JSONObject; | ||
| 5 | +import com.github.shyiko.mysql.binlog.BinaryLogClient; | ||
| 6 | +import com.github.shyiko.mysql.binlog.event.*; | ||
| 7 | +import com.github.shyiko.mysql.binlog.event.deserialization.EventDeserializer; | ||
| 8 | + | ||
| 9 | +import java.io.IOException; | ||
| 10 | +import java.io.Serializable; | ||
| 11 | +import java.util.Arrays; | ||
| 12 | +import java.util.List; | ||
| 13 | +import java.util.Map; | ||
| 14 | +import java.util.Objects; | ||
| 15 | + | ||
| 16 | +public class MysqlBinlogService { | ||
| 17 | + public static void main(String[] args) { | ||
| 18 | + // 这里的账号必须要有权限访问 | ||
| 19 | + BinaryLogClient client = new BinaryLogClient("rm-wz9740un21f09iokuao.mysql.rds.aliyuncs.com", 3306, "luhui", "Luhui586"); | ||
| 20 | + // 反序列化配置 | ||
| 21 | + EventDeserializer eventDeserializer = new EventDeserializer(); | ||
| 22 | + eventDeserializer.setCompatibilityMode(EventDeserializer.CompatibilityMode.DATE_AND_TIME_AS_LONG | ||
| 23 | +// EventDeserializer.CompatibilityMode.CHAR_AND_BINARY_AS_BYTE_ARRAY | ||
| 24 | + ); | ||
| 25 | + // 设置反序列化配置 | ||
| 26 | + client.setEventDeserializer(eventDeserializer); | ||
| 27 | + // 设置自己的client作为服务器的id | ||
| 28 | + client.setServerId(3); | ||
| 29 | + // 可选,设置start fileName+position | ||
| 30 | +// client.setBinlogFilename("master-bin.000080"); | ||
| 31 | +// client.setBinlogPosition(219); | ||
| 32 | + | ||
| 33 | + client.registerEventListener(event -> { | ||
| 34 | + EventData data = event.getData(); | ||
| 35 | + String tableName; | ||
| 36 | + if (data instanceof TableMapEventData) { | ||
| 37 | + System.out.println("Table:"); | ||
| 38 | + TableMapEventData tableMapEventData = (TableMapEventData) data; | ||
| 39 | + System.out.println(tableMapEventData.getTableId() + ": [" + tableMapEventData.getDatabase() + "." + tableMapEventData.getTable() + "]"); | ||
| 40 | + tableName = tableMapEventData.getTable(); | ||
| 41 | + // 如果是不处理的表,那么返回 | ||
| 42 | + if (!Objects.equals(tableName, "student")) | ||
| 43 | + return; | ||
| 44 | + } | ||
| 45 | + if (data instanceof UpdateRowsEventData) { | ||
| 46 | +// System.out.println("Update:"); | ||
| 47 | +// System.out.println(data); | ||
| 48 | + // 获取对应的操作对象的json化数据 | ||
| 49 | + UpdateRowsEventData udata = (UpdateRowsEventData) data; | ||
| 50 | + List<Map.Entry<Serializable[], Serializable[]>> rows = udata.getRows(); | ||
| 51 | + for (Map.Entry<Serializable[], Serializable[]> row : rows) { | ||
| 52 | + List<Serializable> entries = Arrays.asList(row.getValue()); | ||
| 53 | + JSONObject dataObject = getDataObject(entries); | ||
| 54 | + System.out.println(dataObject); | ||
| 55 | + } | ||
| 56 | + } else if (data instanceof WriteRowsEventData) { | ||
| 57 | + WriteRowsEventData wData = (WriteRowsEventData) data; | ||
| 58 | + wData.getIncludedColumns(); | ||
| 59 | + wData.getRows(); | ||
| 60 | + | ||
| 61 | + System.out.println("Insert:"); | ||
| 62 | + System.out.println(data); | ||
| 63 | + } else if (data instanceof DeleteRowsEventData) { | ||
| 64 | + System.out.println("Delete:"); | ||
| 65 | + System.out.println(data); | ||
| 66 | + } | ||
| 67 | + }); | ||
| 68 | + try { | ||
| 69 | + client.connect(); | ||
| 70 | + } catch (IOException e) { | ||
| 71 | + e.printStackTrace(); | ||
| 72 | + } | ||
| 73 | + } | ||
| 74 | + | ||
| 75 | + | ||
| 76 | + /** | ||
| 77 | + * 根据message获取对象 | ||
| 78 | + */ | ||
| 79 | + private static JSONObject getDataObject(List<Serializable> message) { | ||
| 80 | + JSONObject resultObject = new JSONObject(); | ||
| 81 | + String format = "{\"id\":\"0\",\"name\":\"1\",\"age\":\"2\",\"code\":\"3\"}"; | ||
| 82 | + JSONObject json = JSON.parseObject(format); | ||
| 83 | + for (String key : json.keySet()) { | ||
| 84 | + resultObject.put(key, message.get(json.getInteger(key))); | ||
| 85 | + } | ||
| 86 | + return resultObject; | ||
| 87 | + } | ||
| 88 | + | ||
| 89 | +} |
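
The listener above receives each row as a positional `Serializable[]`, which is why `getDataObject` hard-codes a JSON map from field name to column index for the `student` table. Below is a sketch of the same idea for the sensor rows, under two explicit assumptions: the column order shown is hypothetical and must be checked against the real `device_sensor_data_*` DDL, and `DeviceSensorData` is assumed to expose standard setters matching the getters used elsewhere in this diff.

```java
package com.zhonglai.luhui.data.file.service.service;

import java.io.Serializable;

import com.zhonglai.luhui.data.file.service.dto.DeviceSensorData;

// Hypothetical row mapper for binlog WriteRowsEventData rows (sketch, not the project's code).
final class SensorRowMapper {
    // Assumed column order: 0=id, 1=device_info_id, 2=device_model, 3=data_type,
    // 4=data_value, 5=creat_time; verify against the actual table definition.
    static DeviceSensorData toSensorData(Serializable[] row) {
        DeviceSensorData d = new DeviceSensorData();
        d.setDevice_info_id(String.valueOf(row[1]));
        d.setDevice_model(String.valueOf(row[2]));
        d.setData_type(String.valueOf(row[3]));
        d.setData_value(String.valueOf(row[4]));
        // With DATE_AND_TIME_AS_LONG set above, DATETIME values arrive as a long timestamp.
        d.setCreat_time(((Number) row[5]).longValue());
        return d;
    }
}
```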
| @@ -24,7 +24,7 @@ import java.util.concurrent.TimeUnit; | @@ -24,7 +24,7 @@ import java.util.concurrent.TimeUnit; | ||
| 24 | public class FileUtil { | 24 | public class FileUtil { |
| 25 | private static String LvLianFilePath = "D:/data/ly_sensor_data"; | 25 | private static String LvLianFilePath = "D:/data/ly_sensor_data"; |
| 26 | private static String BaiDuWangPanFilePath = "/禄辉/ly_sensor_data"; | 26 | private static String BaiDuWangPanFilePath = "/禄辉/ly_sensor_data"; |
| 27 | - public static String tempFilePath = "F:/data"; | 27 | + public static String tempFilePath = System.getProperty("user.dir")+"/data"; |
| 28 | public static String CRLF = "\r\n"; | 28 | public static String CRLF = "\r\n"; |
| 29 | /** | 29 | /** |
| 30 | * 读取绿联云的文件 | 30 | * 读取绿联云的文件 |
| 1 | +package com.zhonglai.luhui.data.file.service.util; | ||
| 2 | + | ||
| 3 | +import org.apache.commons.lang3.StringUtils; | ||
| 4 | + | ||
| 5 | +import java.util.*; | ||
| 6 | +import java.util.Map.Entry; | ||
| 7 | + | ||
| 8 | +/** | ||
| 9 | + * @Description: | ||
| 10 | + * @author: skies | ||
| 11 | + * @date: | ||
| 12 | + */ | ||
| 13 | +public class InfluxDBFluxExpression { | ||
| 14 | + | ||
| 15 | + /** | ||
| 16 | + * 通用表达式 | ||
| 17 | + * | ||
| 18 | + * @param buffer | ||
| 19 | + * @param bucketName 名称 | ||
| 20 | + * @param tableName 表名 | ||
| 21 | + * @param start 开始时间 | ||
| 22 | + * @param stop 结束时间 | ||
| 23 | + */ | ||
| 24 | + public static void appendCommonFlux(StringBuffer buffer, String bucketName, String tableName, | ||
| 25 | + String start, String stop) { | ||
| 26 | + appendBucketFlux(buffer, bucketName); | ||
| 27 | + appendTimeRangeFlux(buffer, start, stop); | ||
| 28 | + appendTableFlux(buffer, tableName); | ||
| 29 | +// if(timestampFlag) { | ||
| 30 | +// appendTimestampFlux(buffer); | ||
| 31 | +// } | ||
| 32 | +// if(dropDefaultFlag) { | ||
| 33 | +// appendDropFlux(buffer); | ||
| 34 | +// } | ||
| 35 | + | ||
| 36 | + } | ||
| 37 | + | ||
| 38 | + | ||
| 39 | + /** | ||
| 40 | + * 数据源(桶)表达式 | ||
| 41 | + * | ||
| 42 | + * @param buffer | ||
| 43 | + * @param bucketName 名称 | ||
| 44 | + */ | ||
| 45 | + public static void appendBucketFlux(StringBuffer buffer, String bucketName) { | ||
| 46 | + buffer.append("from(bucket: \"" + bucketName + "\") "); | ||
| 47 | + } | ||
| 48 | + | ||
| 49 | + /** | ||
| 50 | + * 表名表达式 | ||
| 51 | + * | ||
| 52 | + * @param buffer | ||
| 53 | + * @param tableName 名称 | ||
| 54 | + */ | ||
| 55 | + public static void appendTableFlux(StringBuffer buffer, String tableName) { | ||
| 56 | + buffer.append("|> filter(fn: (r) => r._measurement == \"" + tableName + "\") "); | ||
| 57 | + } | ||
| 58 | + | ||
| 59 | + /** | ||
| 60 | + * 表名表达式 | ||
| 61 | + * | ||
| 62 | + * @param buffer | ||
| 63 | + * @param tag 名称 | ||
| 64 | + */ | ||
| 65 | + public static void appendTagFlux(StringBuffer buffer, String tag) { | ||
| 66 | + buffer.append("|> filter(fn: (r) => r.tag == \"" + tag + "\") "); | ||
| 67 | + } | ||
| 68 | + | ||
| 69 | + /** | ||
| 70 | + * field表达式 | ||
| 71 | + * | ||
| 72 | + * @param buffer | ||
| 73 | + * @param field 名称 | ||
| 74 | + */ | ||
| 75 | + public static void appendTagField(StringBuffer buffer, String field) { | ||
| 76 | + buffer.append("|> filter(fn: (r) => r._field == \"" + field + "\") "); | ||
| 77 | + } | ||
| 78 | + | ||
| 79 | + | ||
| 80 | + /** | ||
| 81 | + * 时间范围表达式 UTC时间 | ||
| 82 | + * | ||
| 83 | + * @param buffer | ||
| 84 | + * @param start 开始时间 | ||
| 85 | + * @param stop 结束时间 | ||
| 86 | + */ | ||
| 87 | + public static void appendTimeRangeFlux(StringBuffer buffer, String start, String stop) { | ||
| 88 | + if (StringUtils.isBlank(start)) { | ||
| 89 | + start = "1970-01-01T00:00:00.000Z"; | ||
| 90 | + } | ||
| 91 | + if (StringUtils.isBlank(stop)) { | ||
| 92 | + buffer.append("|> range(start:" + start + ") "); | ||
| 93 | + } else { | ||
| 94 | + buffer.append("|> range(start:" + start + ", stop:" + stop + ") "); | ||
| 95 | + } | ||
| 96 | + } | ||
| 97 | + | ||
| 98 | + /** | ||
| 99 | + * 删除字段表达式 | ||
| 100 | + * | ||
| 101 | + * @param buffer | ||
| 102 | + * @param args 需要删除的字段【 参数为空的话删除host字段】 | ||
| 103 | + */ | ||
| 104 | + public static void appendDropFlux(StringBuffer buffer, String... args) { | ||
| 105 | + if (args.length == 0) { | ||
| 106 | + buffer.append("|> drop(columns: [\"host\"]) "); | ||
| 107 | + return; | ||
| 108 | + } | ||
| 109 | + buffer.append("|> drop(columns: ["); | ||
| 110 | + for (int i = 0; i < args.length; i++) { | ||
| 111 | + if (i != 0) { | ||
| 112 | + buffer.append(","); | ||
| 113 | + } | ||
| 114 | + buffer.append("\"" + args[i] + "\""); | ||
| 115 | + } | ||
| 116 | + buffer.append("]) "); | ||
| 117 | + } | ||
| 118 | + | ||
| 119 | + /** | ||
| 120 | + * 复制属性列表达式 | ||
| 121 | + * | ||
| 122 | + * @param buffer | ||
| 123 | + * @param oldField 原来的字段名称 | ||
| 124 | + * @param newField 新的字段名称 | ||
| 125 | + */ | ||
| 126 | + public static void appendDuplicateFlux(StringBuffer buffer, String oldField, String newField) { | ||
| 127 | + buffer.append("|> duplicate(column: \"" + oldField + "\", as: \"" + newField + "\") "); | ||
| 128 | + } | ||
| 129 | + | ||
| 130 | + /** | ||
| 131 | + * 重命名属性列表达式 | ||
| 132 | + * | ||
| 133 | + * @param buffer | ||
| 134 | + * @param oldField 原来的字段名称 | ||
| 135 | + * @param newField 新的字段名称 | ||
| 136 | + */ | ||
| 137 | + public static void appendRenameFlux(StringBuffer buffer, String oldField, String newField) { | ||
| 138 | + buffer.append(" |> rename(columns: {" + oldField + ": \"" + newField + "\"}) "); | ||
| 139 | + } | ||
| 140 | + | ||
| 141 | + | ||
| 142 | + /** | ||
| 143 | + * 最新一条数据表达式 | ||
| 144 | + * | ||
| 145 | + * @param buffer | ||
| 146 | + */ | ||
| 147 | + public static void appendLastFlux(StringBuffer buffer) { | ||
| 148 | + buffer.append("|> last() "); | ||
| 149 | + } | ||
| 150 | + | ||
| 151 | + /** | ||
| 152 | + * 分页查询 | ||
| 153 | + * | ||
| 154 | + * @param buffer | ||
| 155 | + * @param n | ||
| 156 | + * @param offset | ||
| 157 | + */ | ||
| 158 | + public static void appendLimitFlux(StringBuffer buffer, int n, int offset) { | ||
| 159 | + buffer.append("|> limit(n:" + n + ", offset: " + offset + ") "); | ||
| 160 | + } | ||
| 161 | + | ||
| 162 | + /** | ||
| 163 | + * 分组表达式 | ||
| 164 | + * | ||
| 165 | + * @param buffer | ||
| 166 | + */ | ||
| 167 | + public static void appendGroupFlux(StringBuffer buffer, String... columns) { | ||
| 168 | + if (columns.length == 0) { | ||
| 169 | + buffer.append("|> group() "); | ||
| 170 | + } else { | ||
| 171 | + buffer.append("|> group(columns:[ "); | ||
| 172 | + for (int i = 0; i < columns.length; i++) { | ||
| 173 | + if (i != 0) { | ||
| 174 | + buffer.append(","); | ||
| 175 | + } | ||
| 176 | + buffer.append("\"" + columns[i] + "\""); | ||
| 177 | + } | ||
| 178 | + buffer.append("]) "); | ||
| 179 | + } | ||
| 180 | + | ||
| 181 | + } | ||
| 182 | + | ||
| 183 | + /** | ||
| 184 | + * 去重表达式 | ||
| 185 | + * | ||
| 186 | + * @param buffer | ||
| 187 | + */ | ||
| 188 | + public static void appendDistinctFlux(StringBuffer buffer, String... columns) { | ||
| 189 | + if (columns.length == 0) { | ||
| 190 | + buffer.append("|> distinct() "); | ||
| 191 | + } else { | ||
| 192 | + buffer.append("|> distinct(column:\"" + columns[0] + "\") "); | ||
| 193 | + } | ||
| 194 | + | ||
| 195 | + } | ||
| 196 | + | ||
| 197 | + /** | ||
| 198 | + * 总数表达式 | ||
| 199 | + * | ||
| 200 | + * @param buffer | ||
| 201 | + */ | ||
| 202 | + public static void appendCountFlux(StringBuffer buffer) { | ||
| 203 | + buffer.append("|> count() "); | ||
| 204 | + } | ||
| 205 | + | ||
| 206 | + /** | ||
| 207 | + * 前几条数据 | ||
| 208 | + * | ||
| 209 | + * @param buffer | ||
| 210 | + * @param n | ||
| 211 | + */ | ||
| 212 | + public static void appendTopFlux(StringBuffer buffer, int n) { | ||
| 213 | + buffer.append("|> top(n:" + n + ") "); | ||
| 214 | + } | ||
| 215 | + | ||
| 216 | + public static void appendBottomFlux(StringBuffer buffer, int n) { | ||
| 217 | + buffer.append("|> bottom(n:" + n + ") "); | ||
| 218 | + } | ||
| 219 | + | ||
| 220 | + /** | ||
| 221 | + * 排序 | ||
| 222 | + * | ||
| 223 | + * @param buffer | ||
| 224 | + * @param descFlag true 降序 ;false 升序 | ||
| 225 | + * @param columns | ||
| 226 | + */ | ||
| 227 | + public static void appendSortFlux(StringBuffer buffer, boolean descFlag, String... columns) { | ||
| 228 | + if (columns.length == 0) { | ||
| 229 | + buffer.append("|> sort(columns: [\"_value\"], desc: " + descFlag + ")"); | ||
| 230 | + } else { | ||
| 231 | + buffer.append("|> sort(columns:[ "); | ||
| 232 | + for (int i = 0; i < columns.length; i++) { | ||
| 233 | + if (i != 0) { | ||
| 234 | + buffer.append(","); | ||
| 235 | + } | ||
| 236 | + buffer.append("\"" + columns[i] + "\""); | ||
| 237 | + } | ||
| 238 | + buffer.append("], desc: " + descFlag + ") "); | ||
| 239 | + } | ||
| 240 | + } | ||
| 241 | + | ||
| 242 | + | ||
| 243 | + /** | ||
| 244 | + * 时移八小时 | ||
| 245 | + * | ||
| 246 | + * @param buffer | ||
| 247 | + */ | ||
| 248 | + public static void appendTimeShiftFlux(StringBuffer buffer) { | ||
| 249 | + buffer.append("|> timeShift(duration: 8h) "); | ||
| 250 | + } | ||
| 251 | + | ||
| 252 | + /** | ||
| 253 | + * 过滤单个字符表达式 | ||
| 254 | + * | ||
| 255 | + * @param buffer | ||
| 256 | + * @param list | ||
| 257 | + * @param operator 【== != 】 | ||
| 258 | + * @param join 【and or】 | ||
| 259 | + * @param fieldName | ||
| 260 | + */ | ||
| 261 | + public static void appendFilterFlux(StringBuffer buffer, List<String> list, String operator, String join, String fieldName) { | ||
| 262 | + if (list == null || list.size() == 0) { | ||
| 263 | + return; | ||
| 264 | + } | ||
| 265 | + for (int i = 0, size = list.size(); i < size; i++) { | ||
| 266 | + if (i == 0) { | ||
| 267 | + buffer.append("|> filter(fn: (r) =>"); | ||
| 268 | + } else { | ||
| 269 | + buffer.append(join); | ||
| 270 | + } | ||
| 271 | + buffer.append(" r." + fieldName + " " + operator + " \"" + list.get(i) + "\" "); | ||
| 272 | + } | ||
| 273 | + buffer.append(") "); | ||
| 274 | + } | ||
| 275 | + | ||
| 276 | + /** | ||
| 277 | + * 过滤表达式 | ||
| 278 | + * | ||
| 279 | + * @param buffer | ||
| 280 | + * @param map | ||
| 281 | + * @param operator 【== != 】 | ||
| 282 | + * @param join 【and or】 | ||
| 283 | + */ | ||
| 284 | + public static void appendFilterFlux(StringBuffer buffer, Map<String, Object> map, String operator, String join) { | ||
| 285 | + Set<Entry<String, Object>> entrySet = map.entrySet(); | ||
| 286 | + Iterator<Entry<String, Object>> iterator = entrySet.iterator(); | ||
| 287 | + boolean flag = true; | ||
| 288 | + while (iterator.hasNext()) { | ||
| 289 | + Entry<String, Object> next = iterator.next(); | ||
| 290 | + String key = next.getKey(); | ||
| 291 | + Object value = next.getValue(); | ||
| 292 | + if (flag) { | ||
| 293 | + buffer.append("|> filter(fn: (r) =>"); | ||
| 294 | + flag = false; | ||
| 295 | + } else { | ||
| 296 | + buffer.append(join); | ||
| 297 | + } | ||
| 298 | + buffer.append(" r." + key + " " + operator + " \"" + value + "\" "); | ||
| 299 | + } | ||
| 300 | + if (!flag) { | ||
| 301 | + buffer.append(") "); | ||
| 302 | + } | ||
| 303 | + | ||
| 304 | + } | ||
| 305 | + | ||
| 306 | + /** | ||
| 307 | + * 过滤多个字段表达式 | ||
| 308 | + * | ||
| 309 | + * @param buffer | ||
| 310 | + * @param list | ||
| 311 | + * @param innerJoin 【and or】 | ||
| 312 | + * @param operator 【== != 】 | ||
| 313 | + * @param outerJoin 【and or】 | ||
| 314 | + */ | ||
| 315 | + public static void appendMulFilterFlux(StringBuffer buffer, List<Map<String, Object>> list, String innerJoin, String operator, String outerJoin) { | ||
| 316 | + if (list == null || list.size() == 0) { | ||
| 317 | + return; | ||
| 318 | + } | ||
| 319 | + buffer.append("|> filter(fn: (r) => "); | ||
| 320 | + boolean outerFlag = true; | ||
| 321 | + for (int i = 0; i < list.size(); i++) { | ||
| 322 | + Map<String, Object> map = list.get(i); | ||
| 323 | + Set<Entry<String, Object>> entrySet = map.entrySet(); | ||
| 324 | + Iterator<Entry<String, Object>> iterator = entrySet.iterator(); | ||
| 325 | + boolean innerFlag = true; | ||
| 326 | + while (iterator.hasNext()) { | ||
| 327 | + Entry<String, Object> next = iterator.next(); | ||
| 328 | + String key = next.getKey(); | ||
| 329 | + Object value = next.getValue(); | ||
| 330 | + if (innerFlag) { | ||
| 331 | + if (outerFlag) { | ||
| 332 | + outerFlag = false; | ||
| 333 | + } else { | ||
| 334 | + buffer.append(outerJoin); | ||
| 335 | + } | ||
| 336 | + buffer.append(" ( "); | ||
| 337 | + innerFlag = false; | ||
| 338 | + } else { | ||
| 339 | + buffer.append(innerJoin); | ||
| 340 | + } | ||
| 341 | + buffer.append(" r." + key + " " + operator + " \"" + value + "\" "); | ||
| 342 | + } | ||
| 343 | + if (!innerFlag) { | ||
| 344 | + buffer.append(" ) "); | ||
| 345 | + } | ||
| 346 | + } | ||
| 347 | + buffer.append(" ) "); | ||
| 348 | + | ||
| 349 | + } | ||
| 350 | + | ||
| 351 | + /** | ||
| 352 | + * 时间窗口统计 | ||
| 353 | + * | ||
| 354 | + * @param buffer | ||
| 355 | + * @param step 步长值【10m,1h,1d...】 | ||
| 356 | + * @param aggType 统计类型【sum,count,min,max...) | ||
| 357 | + */ | ||
| 358 | + public static void appendAggregateWindowFlux(StringBuffer buffer, String step, String aggType) { | ||
| 359 | + buffer.append("|> aggregateWindow(every: " + step + ", fn: " + aggType + ") "); | ||
| 360 | + } | ||
| 361 | + | ||
| 362 | + public static void appendWindowFlux(StringBuffer buffer, String step) { | ||
| 363 | + buffer.append("|> window(every: " + step + ") "); | ||
| 364 | + } | ||
| 365 | + | ||
| 366 | + /** | ||
| 367 | + * 不带时间窗口统计 | ||
| 368 | + * | ||
| 369 | + * @param buffer | ||
| 370 | + * @param aggType 统计类型【sum,count,min,max...) | ||
| 371 | + */ | ||
| 372 | + public static void appendAggregateFlux(StringBuffer buffer, String aggType) { | ||
| 373 | + buffer.append("|> " + aggType + "() "); | ||
| 374 | + } | ||
| 375 | + | ||
| 376 | + | ||
| 377 | + /** | ||
| 378 | + * 多个查询结果需要指定每个输出结果名称 | ||
| 379 | + * | ||
| 380 | + * @param buffer | ||
| 381 | + * @param name | ||
| 382 | + */ | ||
| 383 | + public static void appendYieldFlux(StringBuffer buffer, String name) { | ||
| 384 | + buffer.append("|> yield(name: \"" + name + "\") "); | ||
| 385 | + } | ||
| 386 | + | ||
| 387 | + /** | ||
| 388 | + * 将时间指定为某单位 | ||
| 389 | + * | ||
| 390 | + * @param buffer | ||
| 391 | + * @param step | ||
| 392 | + */ | ||
| 393 | + public static void appendTruncateTimeColumn(StringBuffer buffer, String step) { | ||
| 394 | + buffer.append("|> truncateTimeColumn(unit: " + step + ") "); | ||
| 395 | + } | ||
| 396 | + | ||
| 397 | + /** | ||
| 398 | + * 导入包名 | ||
| 399 | + * | ||
| 400 | + * @param buffer | ||
| 401 | + * @param name 包名 | ||
| 402 | + */ | ||
| 403 | + public static void appendImportFlux(StringBuffer buffer, String name) { | ||
| 404 | + buffer.append("import \"" + name + "\" "); | ||
| 405 | + } | ||
| 406 | + | ||
| 407 | + /** | ||
| 408 | + * 过滤空值 | ||
| 409 | + * | ||
| 410 | + * @param buffer | ||
| 411 | + */ | ||
| 412 | + public static void appendExistsFlux(StringBuffer buffer) { | ||
| 413 | + buffer.append("|> filter(fn: (r) => exists r._value ) "); | ||
| 414 | + } | ||
| 415 | + | ||
| 416 | + | ||
| 417 | + /** | ||
| 418 | + * 过滤0值 | ||
| 419 | + * | ||
| 420 | + * @param buffer | ||
| 421 | + */ | ||
| 422 | + public static void appendZeroFlux(StringBuffer buffer) { | ||
| 423 | + buffer.append("|> filter(fn: (r) => r._value > 0) "); | ||
| 424 | + } | ||
| 425 | + | ||
| 426 | +} |
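
A short sketch of how these helpers compose into a Flux query string; the bucket, measurement, field and window values below are illustrative, and the snippet only builds the string rather than executing it.

```java
package com.zhonglai.luhui.data.file.service.util;

public class FluxExpressionExample {
    public static void main(String[] args) {
        StringBuffer flux = new StringBuffer();

        // from(bucket) |> range(start, stop) |> filter on _measurement
        InfluxDBFluxExpression.appendCommonFlux(flux, "6_W", "865501049001200",
                "2020-02-03T00:00:00.000Z", "2020-02-04T00:00:00.000Z");

        // Narrow to one field, aggregate hourly, sort ascending by time, page the result.
        InfluxDBFluxExpression.appendTagField(flux, "value");
        InfluxDBFluxExpression.appendAggregateWindowFlux(flux, "1h", "mean");
        InfluxDBFluxExpression.appendSortFlux(flux, false, "_time");
        InfluxDBFluxExpression.appendLimitFlux(flux, 100, 0);

        // Prints roughly:
        // from(bucket: "6_W") |> range(start:2020-02-03T00:00:00.000Z, stop:2020-02-04T00:00:00.000Z)
        //   |> filter(fn: (r) => r._measurement == "865501049001200")
        //   |> filter(fn: (r) => r._field == "value")
        //   |> aggregateWindow(every: 1h, fn: mean)
        //   |> sort(columns:[ "_time"], desc: false) |> limit(n:100, offset: 0)
        System.out.println(flux);
    }
}
```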
| 1 | -# 项目相关配置 jhlt: # 名称 name: zhonglai # 版本 version: 3.8.2 # 版权年份 copyrightYear: 2023 # 开发环境配置 server: # 服务器的HTTP端口,默认为8080 port: 8067 servlet: # 应用的访问路径 context-path: / tomcat: # tomcat的URI编码 uri-encoding: UTF-8 # 连接数满后的排队数,默认为100 accept-count: 1000 threads: # tomcat最大线程数,默认为200 max: 800 # Tomcat启动初始化的线程数,默认值10 min-spare: 100 # 日志配置 logging: level: com.ruoyi: debug org.springframework: warn # Swagger配置 swagger: # 是否开启swagger enabled: true # 请求前缀 pathMapping: /dev-api | ||
| 1 | +# 项目相关配置 jhlt: # 名称 name: zhonglai # 版本 version: 3.8.2 # 版权年份 copyrightYear: 2023 # 开发环境配置 server: # 服务器的HTTP端口,默认为8080 port: 8067 servlet: # 应用的访问路径 context-path: / tomcat: # tomcat的URI编码 uri-encoding: UTF-8 # 连接数满后的排队数,默认为100 accept-count: 1000 threads: # tomcat最大线程数,默认为200 max: 800 # Tomcat启动初始化的线程数,默认值10 min-spare: 100 # 日志配置 logging: level: com.ruoyi: debug org.springframework: warn # Swagger配置 swagger: # 是否开启swagger enabled: true # 请求前缀 pathMapping: /dev-api # canal配置 canal: client: instances: example: host: 192.168.94.186 port: 11111 |
| 1 | -url=jdbc:mysql://rm-wz9740un21f09iokuao.mysql.rds.aliyuncs.com:3306/liu_yu_le?useUnicode=true&characterEncoding=utf8&autoReconnect=true | 1 | +# url=jdbc:mysql://rm-wz9740un21f09iokuao.mysql.rds.aliyuncs.com:3306/liu_yu_le?useUnicode=true&characterEncoding=utf8&autoReconnect=true |
| 2 | +# user = luhui | ||
| 3 | +# pass = Luhui586 | ||
| 4 | + | ||
| 5 | +url=jdbc:mysql://192.168.2.69:3306/ly_sensor_data_2020?useUnicode=true&characterEncoding=utf8&autoReconnect=true | ||
| 2 | user = luhui | 6 | user = luhui |
| 3 | pass = Luhui586 | 7 | pass = Luhui586 |
| 4 | 8 |
| @@ -56,6 +56,24 @@ public class DeviceProductProtocol { | @@ -56,6 +56,24 @@ public class DeviceProductProtocol { | ||
| 56 | deviceDataConfigList.add(new DeviceDataConfig(41,"SYS_ALARM",null,PLCDataType.故障代码,"00","设备故障报警","01","01")); | 56 | deviceDataConfigList.add(new DeviceDataConfig(41,"SYS_ALARM",null,PLCDataType.故障代码,"00","设备故障报警","01","01")); |
| 57 | deviceDataConfigList.add(new DeviceDataConfig(73,"YC_AUTO_P002",PLCType.排污,PLCDataType.控制器状态码,"41","远程自动按钮",null,"01")); | 57 | deviceDataConfigList.add(new DeviceDataConfig(73,"YC_AUTO_P002",PLCType.排污,PLCDataType.控制器状态码,"41","远程自动按钮",null,"01")); |
| 58 | 58 | ||
| 59 | + deviceDataConfigList.add(new DeviceDataConfig(1,"C001_RUN",PLCType.曝气,PLCDataType.控制器状态码,"42","运行信号")); | ||
| 60 | + deviceDataConfigList.add(new DeviceDataConfig(2,"C001_ALARM",PLCType.曝气,PLCDataType.故障代码,"42","故障信号","11","01")); | ||
| 61 | + deviceDataConfigList.add(new DeviceDataConfig(1,"P003_RUN",PLCType.微滤机,PLCDataType.控制器状态码,"43","微滤反洗水泵运行信号")); | ||
| 62 | + deviceDataConfigList.add(new DeviceDataConfig(2,"P003_ALARM",PLCType.微滤机,PLCDataType.故障代码,"43","微滤反洗水泵故障信号","11","01")); | ||
| 63 | + deviceDataConfigList.add(new DeviceDataConfig(1,"M001_RUN",PLCType.微滤机,PLCDataType.控制器状态码,"44","微滤驱动电机运行信号")); | ||
| 64 | + deviceDataConfigList.add(new DeviceDataConfig(2,"M001_ALARM",PLCType.微滤机,PLCDataType.故障代码,"44","微滤驱动电机故障信号","11","01")); | ||
| 65 | + deviceDataConfigList.add(new DeviceDataConfig(1,"DV01_RUN",PLCType.蛋分,PLCDataType.控制器状态码,"45","蛋分电磁阀运行信号")); | ||
| 66 | + deviceDataConfigList.add(new DeviceDataConfig(1,"DF_YW",PLCType.蛋分,PLCDataType.液位高低,"45","蛋分低液位")); | ||
| 67 | + deviceDataConfigList.add(new DeviceDataConfig(1,"ZWX_RUN",PLCType.杀菌,PLCDataType.控制器状态码,"46","紫外线运行信号")); | ||
| 68 | + deviceDataConfigList.add(new DeviceDataConfig(1,"JCY_RUN",PLCType.鱼儿乐,PLCDataType.控制器状态码,"47","水质监测仪运行信号")); | ||
| 69 | + deviceDataConfigList.add(new DeviceDataConfig(1,"ZM_RUN",null,PLCDataType.控制器状态码,"00","照明运行信号")); | ||
| 70 | + deviceDataConfigList.add(new DeviceDataConfig(1,"XHSC_YW_L",PLCType.循环水池,PLCDataType.低液位,"48","循环水池低液位")); | ||
| 71 | + deviceDataConfigList.add(new DeviceDataConfig(1,"XHSC_YW_H",PLCType.循环水池,PLCDataType.高液位,"49","循环水池高液位")); | ||
| 72 | + deviceDataConfigList.add(new DeviceDataConfig(1,"SWLT_YW_1",PLCType.生物滤筒,PLCDataType.液位高低,"50","生物滤筒液位1")); | ||
| 73 | + deviceDataConfigList.add(new DeviceDataConfig(1,"SWLT_YW_2",PLCType.生物滤筒,PLCDataType.液位高低,"51","生物滤筒液位2")); | ||
| 74 | + deviceDataConfigList.add(new DeviceDataConfig(1,"ZWX_YW",PLCType.杀菌,PLCDataType.液位高低,"52","紫外线液位")); | ||
| 75 | + deviceDataConfigList.add(new DeviceDataConfig(1,"WL_YW",PLCType.微滤机,PLCDataType.高液位,"53","微滤机池高液位")); | ||
| 76 | + | ||
| 59 | deviceDataWriteConfigList.add(new DeviceDataConfig(27,"YC_ST_C001A",PLCType.推水机,PLCDataType.控制器状态码,"01","远程启动按钮",null,"01")); | 77 | deviceDataWriteConfigList.add(new DeviceDataConfig(27,"YC_ST_C001A",PLCType.推水机,PLCDataType.控制器状态码,"01","远程启动按钮",null,"01")); |
| 60 | deviceDataWriteConfigList.add(new DeviceDataConfig(28,"YC_STP_C001A",PLCType.推水机,PLCDataType.控制器状态码,"01","远程停止按钮",null,"00")); | 78 | deviceDataWriteConfigList.add(new DeviceDataConfig(28,"YC_STP_C001A",PLCType.推水机,PLCDataType.控制器状态码,"01","远程停止按钮",null,"00")); |
| 61 | deviceDataWriteConfigList.add(new DeviceDataConfig(29,"YC_ST_C001B",PLCType.推水机,PLCDataType.控制器状态码,"02","远程启动按钮",null,"01")); | 79 | deviceDataWriteConfigList.add(new DeviceDataConfig(29,"YC_ST_C001B",PLCType.推水机,PLCDataType.控制器状态码,"02","远程启动按钮",null,"01")); |
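The new entries above register the filtration, protein-skimmer, sterilization and tank-level points as `DeviceDataConfig(index, tag, PLCType, PLCDataType, address, description, ...)` rows. As a rough sketch of how such a table can be queried, the snippet below mirrors the shape of these rows with a local record and groups them by module type; the record and its field names are stand-ins, not the project's actual DeviceDataConfig API.

```java
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class DataPointIndex {
    // Simplified stand-in for DeviceDataConfig; the field names are assumptions for illustration.
    record DataPoint(int index, String tag, String moduleType, String dataType, String address, String description) {}

    public static void main(String[] args) {
        List<DataPoint> points = List.of(
                new DataPoint(1, "C001_RUN",   "曝气",   "控制器状态码", "42", "运行信号"),
                new DataPoint(2, "C001_ALARM", "曝气",   "故障代码",     "42", "故障信号"),
                new DataPoint(1, "P003_RUN",   "微滤机", "控制器状态码", "43", "微滤反洗水泵运行信号"));

        // Group registered tags by module type, the way an alarm scanner or UI might index them.
        Map<String, List<String>> tagsByModule = points.stream()
                .collect(Collectors.groupingBy(DataPoint::moduleType,
                        Collectors.mapping(DataPoint::tag, Collectors.toList())));
        System.out.println(tagsByModule); // e.g. {曝气=[C001_RUN, C001_ALARM], 微滤机=[P003_RUN]}
    }
}
```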
| @@ -15,7 +15,9 @@ public enum PLCDataType { | @@ -15,7 +15,9 @@ public enum PLCDataType { | ||
| 15 | PH("8"), | 15 | PH("8"), |
| 16 | 液位高低("100"), | 16 | 液位高低("100"), |
| 17 | 复位("101"), | 17 | 复位("101"), |
| 18 | - 控制柜状态("9"); | 18 | + 低液位("102"), |
| 19 | + 高液位("103"), | ||
| 20 | + 控制柜状态("104"); | ||
| 19 | public String sensorDataType; | 21 | public String sensorDataType; |
| 20 | 22 | ||
| 21 | PLCDataType(String sensorDataType) | 23 | PLCDataType(String sensorDataType) |
| @@ -11,6 +11,11 @@ public enum PLCType { | @@ -11,6 +11,11 @@ public enum PLCType { | ||
| 11 | 推水机(7), | 11 | 推水机(7), |
| 12 | 中转泵(9), | 12 | 中转泵(9), |
| 13 | 中转池(10), | 13 | 中转池(10), |
| 14 | + 微滤机(11), | ||
| 15 | + 蛋分(12), | ||
| 16 | + 循环水池(13), | ||
| 17 | + 杀菌(14), | ||
| 18 | + 生物滤筒(15), | ||
| 14 | 备用(8); | 19 | 备用(8); |
| 15 | 20 | ||
| 16 | private Integer device_terminal_type_key; | 21 | private Integer device_terminal_type_key; |
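PLCDataType gains dedicated low/high level codes (102/103) and moves 控制柜状态 from the old "9" to "104", while PLCType adds the new equipment modules (微滤机, 蛋分, 循环水池, 杀菌, 生物滤筒). Since the enum exposes its wire code through the public `sensorDataType` field shown above, a reverse lookup like the sketch below is a natural companion; the helper class itself is illustrative and may already exist elsewhere in the project.

```java
import java.util.Arrays;
import java.util.Optional;

// Relies only on what the hunk above shows: PLCDataType exposes a public String sensorDataType.
public class PLCDataTypeLookup {
    public static Optional<PLCDataType> fromCode(String code) {
        return Arrays.stream(PLCDataType.values())
                .filter(t -> code.equals(t.sensorDataType))
                .findFirst();
    }

    public static void main(String[] args) {
        System.out.println(fromCode("102")); // Optional[低液位]
        System.out.println(fromCode("104")); // Optional[控制柜状态] after the renumbering
    }
}
```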
| @@ -24,7 +24,7 @@ mqtt: | @@ -24,7 +24,7 @@ mqtt: | ||
| 24 | clientId: ${random.uuid} | 24 | clientId: ${random.uuid} |
| 25 | #公司id | 25 | #公司id |
| 26 | roleid: 2 | 26 | roleid: 2 |
| 27 | - mqtt_usernames: PLC_004 | 27 | + mqtt_usernames: PLC_006,PLC_004 |
| 28 | #订阅的topic | 28 | #订阅的topic |
| 29 | topics: ALL_POST,PUT_REQ | 29 | topics: ALL_POST,PUT_REQ |
| 30 | sub_clientid: '#' | 30 | sub_clientid: '#' |
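`mqtt_usernames` now carries a comma-separated list (PLC_006,PLC_004) rather than a single name. Below is a minimal sketch of how such a property is commonly bound in Spring; the class and field names are illustrative, and the service's real binding code is not shown in this diff.

```java
import java.util.Arrays;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

@Component
public class MqttUsernameProperties {
    // Spring splits "PLC_006,PLC_004" on commas when binding the property to an array.
    @Value("${mqtt.mqtt_usernames}")
    private String[] mqttUsernames;

    public boolean isWatched(String username) {
        return Arrays.asList(mqttUsernames).contains(username);
    }
}
```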
| @@ -148,6 +148,7 @@ | @@ -148,6 +148,7 @@ | ||
| 148 | <artifactId>lh-jar-device-analysis</artifactId> | 148 | <artifactId>lh-jar-device-analysis</artifactId> |
| 149 | <scope>compile</scope> | 149 | <scope>compile</scope> |
| 150 | </dependency> | 150 | </dependency> |
| 151 | + | ||
| 151 | </dependencies> | 152 | </dependencies> |
| 152 | 153 | ||
| 153 | <build> | 154 | <build> |
| 1 | package com.zhonglai.luhui.mqtt; | 1 | package com.zhonglai.luhui.mqtt; |
| 2 | 2 | ||
| 3 | +import com.ruoyi.common.utils.GsonConstructor; | ||
| 4 | +import com.zhonglai.luhui.device.analysis.comm.service.redis.RedisService; | ||
| 3 | import com.zhonglai.luhui.mqtt.comm.service.TerminalService; | 5 | import com.zhonglai.luhui.mqtt.comm.service.TerminalService; |
| 4 | import com.zhonglai.luhui.mqtt.service.ClienNoticeService; | 6 | import com.zhonglai.luhui.mqtt.service.ClienNoticeService; |
| 5 | import org.slf4j.Logger; | 7 | import org.slf4j.Logger; |
| @@ -10,6 +12,10 @@ import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; | @@ -10,6 +12,10 @@ import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; | ||
| 10 | import org.springframework.boot.builder.SpringApplicationBuilder; | 12 | import org.springframework.boot.builder.SpringApplicationBuilder; |
| 11 | import org.springframework.context.annotation.ComponentScan; | 13 | import org.springframework.context.annotation.ComponentScan; |
| 12 | 14 | ||
| 15 | +import javax.annotation.PostConstruct; | ||
| 16 | +import java.util.HashMap; | ||
| 17 | +import java.util.Map; | ||
| 18 | + | ||
| 13 | 19 | ||
| 14 | @ComponentScan(basePackages = { | 20 | @ComponentScan(basePackages = { |
| 15 | "com.zhonglai.luhui.device.analysis", | 21 | "com.zhonglai.luhui.device.analysis", |
| @@ -22,11 +28,24 @@ import org.springframework.context.annotation.ComponentScan; | @@ -22,11 +28,24 @@ import org.springframework.context.annotation.ComponentScan; | ||
| 22 | @SpringBootApplication(exclude= {DataSourceAutoConfiguration.class}) | 28 | @SpringBootApplication(exclude= {DataSourceAutoConfiguration.class}) |
| 23 | public class MqttApplication { | 29 | public class MqttApplication { |
| 24 | private static Logger log = LoggerFactory.getLogger(MqttApplication.class); | 30 | private static Logger log = LoggerFactory.getLogger(MqttApplication.class); |
| 31 | + @Autowired | ||
| 32 | +    private RedisService redisService; | ||
| 25 | 33 | ||
| 26 | public static void main(String[] args) { | 34 | public static void main(String[] args) { |
| 27 | log.info("启动服务"); | 35 | log.info("启动服务"); |
| 28 | SpringApplicationBuilder builder = new SpringApplicationBuilder(MqttApplication.class); | 36 | SpringApplicationBuilder builder = new SpringApplicationBuilder(MqttApplication.class); |
| 29 | builder.run( args); | 37 | builder.run( args); |
| 38 | + | ||
| 30 | } | 39 | } |
| 31 | 40 | ||
| 41 | +// @PostConstruct | ||
| 42 | +// public void test() | ||
| 43 | +// { | ||
| 44 | +// Map<String,Object> map = new HashMap<>(); | ||
| 45 | +// map.put("deviceName","SC-M8 号"); | ||
| 46 | +// String key ="ly:x6:devices:866838067734877_1_1"; | ||
| 47 | +// System.out.println(GsonConstructor.get().toJson(redisService.hmget(key,"deviceName")));; | ||
| 48 | +// redisService.hmset(key,map); | ||
| 49 | +// } | ||
| 50 | + | ||
| 32 | } | 51 | } |
| 1 | +package com.zhonglai.luhui.mqtt; | ||
| 2 | + | ||
| 3 | +import com.ruoyi.common.utils.GsonConstructor; | ||
| 4 | +import redis.clients.jedis.Jedis; | ||
| 5 | + | ||
| 6 | +import java.util.HashMap; | ||
| 7 | +import java.util.Map; | ||
| 8 | + | ||
| 9 | +public class Test { | ||
| 10 | + public static void main(String[] args) { | ||
| 11 | + Jedis jedis = new Jedis("47.112.163.61",9527); | ||
| 12 | + jedis.auth("Luhui586"); | ||
| 13 | + jedis.select(1); | ||
| 14 | + | ||
| 15 | +        // Connect to Redis and check the connection status | ||
| 16 | + String pingResponse = jedis.ping(); | ||
| 17 | + System.out.println("Ping Response: " + pingResponse); | ||
| 18 | + | ||
| 19 | + Map<String,String> map = new HashMap<>(); | ||
| 20 | + map.put("deviceName","SC-M8 号"); | ||
| 21 | + String key ="ly:x6:devices:866838067734877_1_1"; | ||
| 22 | +        System.out.println(GsonConstructor.get().toJson(jedis.hmget(key,"deviceName"))); | ||
| 23 | + | ||
| 24 | + jedis.hmset(key,map); | ||
| 25 | +//        // Update the key-value data in Redis | ||
| 26 | +// jedis.set("key", "newValue"); | ||
| 27 | +// | ||
| 28 | +//        // Read back and print the updated value | ||
| 29 | +// String value = jedis.get("key"); | ||
| 30 | +// System.out.println("Updated Value: " + value); | ||
| 31 | + | ||
| 32 | +        // Close the connection | ||
| 33 | + jedis.close(); | ||
| 34 | + } | ||
| 35 | +} |
| @@ -41,15 +41,15 @@ spring: | @@ -41,15 +41,15 @@ spring: | ||
| 41 | 41 | ||
| 42 | mqtt: | 42 | mqtt: |
| 43 | #链接地址 | 43 | #链接地址 |
| 44 | - broker: tcp://175.24.61.68:1883 | 44 | + broker: tcp://127.0.0.1:1883 |
| 45 | #唯一标识 | 45 | #唯一标识 |
| 46 | clientId: ${random.uuid} | 46 | clientId: ${random.uuid} |
| 47 | #公司id | 47 | #公司id |
| 48 | roleid: 2 | 48 | roleid: 2 |
| 49 | - mqtt_usernames: 6_WP,12_BPQ,10_TLJ,NWDB_2023,WLJ_1,YWB_A700E,12_ZNZY | 49 | + mqtt_usernames: 6_WP |
| 50 | #订阅的topic | 50 | #订阅的topic |
| 51 | topics: ADD_POST,ALL_POST,DB_TOPIC_DISTRIBUTE,GET/+,online,PUT_REQ/+,READ_REQ/+ | 51 | topics: ADD_POST,ALL_POST,DB_TOPIC_DISTRIBUTE,GET/+,online,PUT_REQ/+,READ_REQ/+ |
| 52 | - sub_clientid: '866520063012785' | 52 | + sub_clientid: '+' |
| 53 | topicconfig: "/{{roleid}}/{{username}}/{{clientid}}/{{payloadtype}}/{{topicType}}/{{messageid}}" | 53 | topicconfig: "/{{roleid}}/{{username}}/{{clientid}}/{{payloadtype}}/{{topicType}}/{{messageid}}" |
| 54 | top_return_map: '{"PUT":"PUT_REQ","READ":"READ_REQ"}' | 54 | top_return_map: '{"PUT":"PUT_REQ","READ":"READ_REQ"}' |
| 55 | username: sysuser | 55 | username: sysuser |
| @@ -58,6 +58,7 @@ mqtt: | @@ -58,6 +58,7 @@ mqtt: | ||
| 58 | #客户端操作时间 | 58 | #客户端操作时间 |
| 59 | operationTime: 10 | 59 | operationTime: 10 |
| 60 | 60 | ||
| 61 | + | ||
| 61 | sys: | 62 | sys: |
| 62 | redis: | 63 | redis: |
| 63 | field: "lh:mqttservice:" | 64 | field: "lh:mqttservice:" |
| @@ -523,6 +523,16 @@ | @@ -523,6 +523,16 @@ | ||
| 523 | <artifactId>commons-pool</artifactId> | 523 | <artifactId>commons-pool</artifactId> |
| 524 | <version>1.6</version> | 524 | <version>1.6</version> |
| 525 | </dependency> | 525 | </dependency> |
| 526 | + <dependency> | ||
| 527 | + <groupId>com.zendesk</groupId> | ||
| 528 | + <artifactId>mysql-binlog-connector-java</artifactId> | ||
| 529 | + <version>0.25.0</version> | ||
| 530 | + </dependency> | ||
| 531 | + <dependency> | ||
| 532 | + <groupId>com.influxdb</groupId> | ||
| 533 | + <artifactId>influxdb-client-java</artifactId> | ||
| 534 | + <version>6.12.0</version> | ||
| 535 | + </dependency> | ||
| 526 | </dependencies> | 536 | </dependencies> |
| 527 | 537 | ||
| 528 | 538 |
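The parent POM now manages mysql-binlog-connector-java 0.25.0 and influxdb-client-java 6.12.0, matching the dependencies added to lh-data-file-service earlier in this changeset. The compressed sketch below shows the two client APIs side by side with placeholder hosts and credentials; how binlog rows are actually mapped to measurements lives in the service code, not here.

```java
import com.github.shyiko.mysql.binlog.BinaryLogClient;
import com.github.shyiko.mysql.binlog.event.EventType;
import com.influxdb.client.InfluxDBClient;
import com.influxdb.client.InfluxDBClientFactory;
import com.influxdb.client.WriteApiBlocking;
import com.influxdb.client.domain.WritePrecision;
import com.influxdb.client.write.Point;

import java.time.Instant;

public class BinlogToInfluxSketch {
    public static void main(String[] args) throws Exception {
        // InfluxDB 2.x client; URL, token, org and bucket are placeholders.
        InfluxDBClient influx = InfluxDBClientFactory.create(
                "http://localhost:8086", "my-token".toCharArray(), "my-org", "sensor-data");
        WriteApiBlocking writeApi = influx.getWriteApiBlocking();

        // Stream the MySQL binlog; host and credentials are placeholders.
        BinaryLogClient binlog = new BinaryLogClient("192.168.31.133", 3306, "user", "password");
        binlog.registerEventListener(event -> {
            if (event.getHeader().getEventType() == EventType.EXT_WRITE_ROWS) {
                // The real service would decode the row image here; this only records the event.
                Point p = Point.measurement("binlog_events")
                        .addTag("type", event.getHeader().getEventType().name())
                        .addField("count", 1L)
                        .time(Instant.now(), WritePrecision.MS);
                writeApi.writePoint(p);
            }
        });
        binlog.connect(); // blocks while streaming
    }
}
```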