2024-04-21 16:32:18 +08:00
parent c607d1672d
commit fcc8c0c959
13 changed files with 2893 additions and 30 deletions

View File

@@ -13,6 +13,8 @@ import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.annotations.Comment;
import org.hibernate.annotations.Fetch;
import org.hibernate.annotations.FetchMode;
import java.time.LocalDate;
import java.time.LocalTime;
@@ -33,12 +35,15 @@ public class CoalWashingDailyAnalysisEntity extends OrgCommonEntity {
@Comment("日期")
private LocalDate date;
@ElementCollection
@ElementCollection()
@CollectionTable(foreignKey = @ForeignKey(ConstraintMode.NO_CONSTRAINT))
@Fetch(FetchMode.SUBSELECT)
private List<CoalWashingDailyAnalysisItemVo> inputItems;
@ElementCollection
@ElementCollection()
@CollectionTable(foreignKey = @ForeignKey(ConstraintMode.NO_CONSTRAINT))
@Fetch(FetchMode.SUBSELECT)
private List<CoalWashingDailyAnalysisKFItemVo> kfItems;
// @ElementCollection
@@ -46,8 +51,10 @@ public class CoalWashingDailyAnalysisEntity extends OrgCommonEntity {
// @CollectionTable(foreignKey = @ForeignKey(ConstraintMode.NO_CONSTRAINT))
// private List<CoalWashingDailyAnalysisItemVo> rollingAvgItems;
@ElementCollection
@ElementCollection()
@CollectionTable(foreignKey = @ForeignKey(ConstraintMode.NO_CONSTRAINT))
@Fetch(FetchMode.SUBSELECT)
private List<CoalWashingDailyAnalysisParamVo> paramsInfo;
@Comment("目标大堆 灰")

View File

@@ -1,9 +1,9 @@
package cn.lihongjie.coal.dictionary.dto;
import cn.lihongjie.coal.base.dto.CommonDto;
import cn.lihongjie.coal.base.dto.SimpleDto;
import cn.lihongjie.coal.common.DictCode;
import cn.lihongjie.coal.pojoProcessor.DictTranslate;
import cn.lihongjie.coal.script.entity.ScriptEntity;
import lombok.Data;
@@ -20,5 +20,6 @@ public class DictionaryDto extends CommonDto {
@DictTranslate(dictKey = DictCode.DICT_COMPONENTTYPE)
private String componentTypeName;
private ScriptEntity script;
private SimpleDto script;
}

View File

@@ -1,11 +1,15 @@
package cn.lihongjie.coal.dictionary.dto;
import cn.lihongjie.coal.base.dto.TreeDto;
import java.util.List;
import lombok.Data;
import java.util.List;
@Data
public class DictionaryTreeDto extends DictionaryDto {
private List<TreeDto> tree;
}

View File

@@ -10,6 +10,8 @@ import jakarta.persistence.*;
import lombok.Data;
import org.hibernate.annotations.Comment;
import org.hibernate.annotations.Fetch;
import org.hibernate.annotations.FetchMode;
import java.time.LocalDate;
import java.util.List;
@@ -68,10 +70,12 @@ public class EmployeeEntity extends OrgCommonEntity {
@Comment("身份证照片")
@OneToMany
@Fetch(FetchMode.SUBSELECT)
private List<FileEntity> idImages;
@Comment("银行卡照片")
@OneToMany
@Fetch(FetchMode.SUBSELECT)
private List<FileEntity> bankCardImages;
@Comment("部门")
@@ -101,15 +105,18 @@ public class EmployeeEntity extends OrgCommonEntity {
@ElementCollection
@Fetch(FetchMode.SUBSELECT)
private List<EmpFamilyMemberVO> familyMembers;
@ElementCollection
@Fetch(FetchMode.SUBSELECT)
private List<EmpCertVO> certs;
@ElementCollection
@Fetch(FetchMode.SUBSELECT)
private List<EmpCarVO> cars;
/**

View File

@@ -93,27 +93,29 @@ public class PdcDeviceDataService
"""
with tmp as (select d.device_id,
max(pdc.code) as code ,
max(pdc.name) as name ,
pdc.coal_type,
time_bucket_gapfill(:bucket, d.time) as tb,
(max(d.total_data)) as total_data
public.time_bucket_gapfill(cast(:bucket as interval), d.time) as tb,
locf(max(d.total_data), (select max(total_data) from t_pdc_device_data dx where dx.device_id = d.device_id and dx.time < :startTime
) , true) as total_data
from t_pdc_device_data d
left join t_pdc_device pdc on d.device_id = pdc.id
WHERE d.time > :startTime
AND d.time < :endTime
WHERE d.time >= :startTime
AND d.time <= :endTime
and pdc.device_group = :deviceGroup
and pdc.organization_id = :organizationId
group by d.device_id, pdc.coal_type, tb
order by tb),
tmp1 as (
select tmp.*, d.coal_type, d.code, d.name from tmp inner join t_pdc_device d on tmp.device_id = d.id
),
tmp2 as (select *,
round(cast(tmp.total_data - lag(tmp.total_data, 1, null)
over (partition by tmp.device_id order by tmp.tb ) as numeric),
3) as diff
from tmp),
from tmp1 tmp),
tmp3 as (select *
from tmp2
where coal_type = '2'),
@@ -142,7 +144,7 @@ public class PdcDeviceDataService
// Return at most 100 data points for plotting, with at least 10 s per point
var maxBucket = Math.max(10, duration.getSeconds() / 100);
nativeQuery.setParameter("bucket", maxBucket + " seconds");
nativeQuery.setParameter("bucket", maxBucket + " second");
nativeQuery.setParameter("organizationId", Ctx.currentUser().getOrganizationId());
List<Map> list =
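The rewritten query leans on TimescaleDB gap filling: time_bucket_gapfill is schema-qualified and given an explicit interval cast for the named parameter, the time bounds become inclusive, and locf carries the last counter value forward, seeded by a lookup before :startTime so the first bucket is not NULL. A stripped-down sketch of the same pattern, using placeholder table and column names rather than the project's schema:

String sql = """
    select public.time_bucket_gapfill(cast(:bucket as interval), m.time) as tb,
           locf(max(m.total_data),
                (select max(p.total_data)
                   from t_metric p
                  where p.device_id = m.device_id and p.time < :startTime),
                true) as total_data
      from t_metric m
     where m.time >= :startTime
       and m.time <= :endTime
     group by m.device_id, tb
     order by tb
    """;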

View File

@@ -15,7 +15,7 @@ public interface PdcDeviceRealTimeDataRepository
@Query(
value =
"select id from t_pdc_device_real_time_data dd inner join t_pdc_device d on dd.device_id = d.id where d.id in :deviceIds and (extract(epoch from (now() - dd.last_save_time)) / 60) >= d.data_save_interval ",
"select dd.id from t_pdc_device_real_time_data dd inner join t_pdc_device d on dd.device_id = d.id where d.id in :deviceIds and (extract(epoch from (now() - dd.last_save_time)) / 60) >= d.data_save_interval ",
nativeQuery = true)
List<String> findNeedToSaveData(List<String> deviceIds);
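The only change here is the dd. qualifier: both joined tables expose an id column, so once the join is present an unqualified select id is ambiguous and PostgreSQL rejects the query. A minimal illustration with placeholder tables:

String sql = "select dd.id from t_detail dd inner join t_master d on dd.master_id = d.id"; // qualified to resolve the ambiguous id column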

View File

@@ -99,12 +99,18 @@ public class PdcDeviceRealTimeDataService
List<String> ids =
this.repository.findNeedToSaveData(
devices.stream().map(x -> x.getId()).collect(Collectors.toList()));
log.info("需要保存的皮带秤实时数据: {}", ids.size());
if (ids.isEmpty()){
return;
}
this.repository
.findAllById(ids)
.forEach(
x -> {
PdcDeviceDataEntity dataEntity = this.mapper.toDataEntity(x);
dataEntity.setId(null);
pdcDeviceDataService.save(dataEntity);
@@ -134,7 +140,7 @@ public class PdcDeviceRealTimeDataService
group by dd.device_id
)
,
tmp2 as (select rtd.*, d.coal_type as coal_type, d.name as device_name, d.code as device_code, d.device_group as device_group, round(cast(rtd.total_data - tmp.min_total as numeric), 3) as time_total
tmp2 as (select rtd.*, d.sort_key as d_sort_key, d.coal_type as coal_type, d.name as device_name, d.code as device_code, d.device_group as device_group, round(cast(rtd.total_data - tmp.min_total as numeric), 3) as time_total
from t_pdc_device_real_time_data rtd
inner join t_pdc_device d on rtd.device_id = d.id
inner join tmp on rtd.device_id = tmp.device_id)
@@ -143,7 +149,7 @@ public class PdcDeviceRealTimeDataService
select a.*, round(cast(case when b.time_total is null or b.time_total = 0 then 0 else ((a.time_total / b.time_total) * 100.0) end as numeric),3)as time_percent, b.time_total as bt from tmp2 a left join tmp2 b on b.coal_type = '2'
order by a.d_sort_key
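Two things change in this hunk: the save loop returns early when no ids are due, and the tmp2 CTE projects d.sort_key as d_sort_key so the new outer order by a.d_sort_key can reference it, since a column is only visible outside a CTE if it is part of that CTE's select list. A reduced sketch of the ordering part with placeholder tables:

String sql = """
    with tmp2 as (select rtd.*, d.sort_key as d_sort_key       -- must be projected here
                    from t_reading rtd
                   inner join t_device d on rtd.device_id = d.id)
    select a.* from tmp2 a order by a.d_sort_key
    """;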

View File

@@ -1,6 +1,8 @@
package cn.lihongjie.coal.permission.dto;
import cn.lihongjie.coal.base.dto.OrgCommonDto;
import cn.lihongjie.coal.common.DictCode;
import cn.lihongjie.coal.pojoProcessor.DictTranslate;
import lombok.Data;
@@ -10,6 +12,6 @@ public class PermissionSimpleDto extends OrgCommonDto {
private String parentName;
private String permissionType;
@DictTranslate(dictKey = DictCode.PERMISSION_TYPE)
private String permissionTypeName;
}

View File

@@ -12,6 +12,7 @@ import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.annotations.Comment;
import org.hibernate.annotations.Fetch;
import java.util.ArrayList;
import java.util.List;
@@ -27,14 +28,16 @@ public class ResourceEntity extends CommonEntity {
@ManyToMany(mappedBy = "resources")
@org.hibernate.annotations.Cache(
usage = org.hibernate.annotations.CacheConcurrencyStrategy.READ_WRITE)
@Fetch(org.hibernate.annotations.FetchMode.SUBSELECT)
private List<PermissionEntity> permissions;
@OneToMany(mappedBy = "parent", cascade = CascadeType.ALL)
@org.hibernate.annotations.Cache(
usage = org.hibernate.annotations.CacheConcurrencyStrategy.READ_WRITE)
@Fetch(org.hibernate.annotations.FetchMode.SUBSELECT)
private List<ResourceEntity> children;
@ManyToOne
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "parent_id", foreignKey = @ForeignKey(value = ConstraintMode.NO_CONSTRAINT))
private ResourceEntity parent;

View File

@@ -3,10 +3,7 @@ package cn.lihongjie.coal.weightDeviceData.entity;
import cn.lihongjie.coal.base.entity.OrgCommonEntity;
import cn.lihongjie.coal.weightDevice.entity.WeightDeviceEntity;
import jakarta.persistence.Column;
import jakarta.persistence.Entity;
import jakarta.persistence.FetchType;
import jakarta.persistence.ManyToOne;
import jakarta.persistence.*;
import lombok.Data;
@@ -14,6 +11,8 @@ import org.hibernate.annotations.ColumnDefault;
import org.hibernate.annotations.Comment;
import java.time.LocalDateTime;
import java.util.Objects;
import java.util.stream.Stream;
/**
* create table dbo.称重信息 ( <br>
@@ -194,7 +193,6 @@ import java.time.LocalDateTime;
@Entity
public class WeightDeviceDataEntity extends OrgCommonEntity {
@ManyToOne(fetch = FetchType.LAZY)
private WeightDeviceEntity device;
@@ -460,11 +458,27 @@ public class WeightDeviceDataEntity extends OrgCommonEntity {
@ColumnDefault("'0'")
private String archiveStatus = "0";
@Comment("最小时间, 用于排序")
@Column(insertable = false, updatable = false, columnDefinition = " timestamp(6) generated always as ( least(mz_time, pz_time, ycgbtime, ecgb_time) ) stored ")
private LocalDateTime minTime;
@Override
public void prePersist() {
super.prePersist();
this.updateMinTime();
}
@Override
public void preUpdate() {
super.preUpdate();
this.updateMinTime();
}
private void updateMinTime() {
this.minTime =
Stream.of(this.mzTime, this.pzTime, this.ycgbTIme, this.ecgbTime)
.filter(Objects::nonNull)
.min(LocalDateTime::compareTo)
.orElse(null);
}
}
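minTime is now maintained in two places: as a PostgreSQL stored generated column that Hibernate never writes (insertable and updatable are false), and as a value recomputed in the JPA lifecycle callbacks so the in-memory object stays consistent before a flush. The two computations agree because least() in PostgreSQL ignores NULL arguments, matching the Objects::nonNull filter in the stream. The mapping side of the pattern in isolation, with invented column names:

@Comment("Earliest event time, used for sorting")
@Column(insertable = false, updatable = false,
        columnDefinition = "timestamp(6) generated always as ( least(a_time, b_time) ) stored")
private LocalDateTime minTime;   // computed by the database; Hibernate only reads it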

View File

@@ -125,13 +125,24 @@ public class WeightDeviceDataService
.map(
x -> {
String s =
StringUtils.equalsAnyIgnoreCase(x.getFunction(), "sum", "min", "max", "avg") ?
x.getFunction()
+ "( cast( d."
+ CaseFormat.UPPER_CAMEL.to(
CaseFormat.LOWER_UNDERSCORE,
x.getFieldName())
+ " as numeric)) "
:
x.getFunction()
+ "( d."
+ CaseFormat.UPPER_CAMEL.to(
CaseFormat.LOWER_UNDERSCORE,
x.getFieldName())
+ ") ";
+ ") ";
if (ObjectUtils.notEqual(x.getFunction(), "count")) {
s = "round(cast(" + s + " as numeric), 2)";

View File

@@ -0,0 +1 @@
update t_weight_device_data set min_time = least(mz_time, pz_time, ycgbtime, ecgb_time);

File diff suppressed because it is too large