2024-04-30 19:40:28 +08:00
parent 9222d2c421
commit 266bb3693b
9 changed files with 148 additions and 48 deletions

View File

@@ -149,13 +149,13 @@ public class EmDeviceService extends BaseService<EmDeviceEntity, EmDeviceReposit
"yyyy-MM-dd HH:mm:ss")));
} catch (Exception e) {
log.error("get his data error {} ", deviceIds, e);
log.debug("get his data error {} ", deviceIds, e);
continue;
}
String code = hisData.code();
if (ObjectUtils.notEqual(code, "1000")) {
log.error(
log.debug(
"get his data error {} {} {}",
hisData.code(),
hisData.message(),
@@ -166,7 +166,7 @@ public class EmDeviceService extends BaseService<EmDeviceEntity, EmDeviceReposit
List<DustApi.DeviceData> data = hisData.data();
if (CollectionUtils.isEmpty(data)) {
log.error(
log.debug(
"get his data empty {} {} {}",
hisData.code(),
hisData.message(),
@@ -247,16 +247,17 @@ public class EmDeviceService extends BaseService<EmDeviceEntity, EmDeviceReposit
realtimeData =
dustApi.getRealtimeData(
devices.get(0).getThirdAccount().getToken(), deviceIds);
} catch (Exception e) {
log.error("get realtime data error {} ", deviceIds, e);
log.debug("get realtime data error {} ", deviceIds, e);
continue;
}
String code = realtimeData.code();
if (ObjectUtils.notEqual(code, "1000")) {
log.error(
log.debug(
"get realtime data error {} {} {}",
realtimeData.code(),
realtimeData.message(),
@@ -267,7 +268,7 @@ public class EmDeviceService extends BaseService<EmDeviceEntity, EmDeviceReposit
List<DustApi.DeviceData> data = realtimeData.data();
if (CollectionUtils.isEmpty(data)) {
log.error(
log.debug(
"get realtime data empty {} {} {}",
realtimeData.code(),
realtimeData.message(),
@@ -297,7 +298,7 @@ public class EmDeviceService extends BaseService<EmDeviceEntity, EmDeviceReposit
data.stream()
.peek(x -> {
if (!x.notEmpty()){
log.error("data is empty {}", x);
log.debug("data is empty {}", x);
}
})
@@ -321,10 +322,10 @@ public class EmDeviceService extends BaseService<EmDeviceEntity, EmDeviceReposit
emDeviceDataService.saveAll(collect);
log.info("get realtime data success {} ", deviceIds);
log.debug("get realtime data success {} ", deviceIds);
}
} catch (Exception e) {
log.error("query realtime data error", e);
log.debug("query realtime data error", e);
}
}
}

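The hunks above downgrade recurring third-party polling failures from error/info to debug. One detail worth keeping in mind with SLF4J's parameterized logging: when the last argument is a Throwable and has no matching {} placeholder, its stack trace is still attached to the message. A minimal sketch of that call shape, with a hypothetical class and logger (not part of this commit):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;

// Hypothetical illustration of the logging call shape used in EmDeviceService.
public class DeviceLogSketch {
    private static final Logger log = LoggerFactory.getLogger(DeviceLogSketch.class);

    void onPollFailure(List<Long> deviceIds, Exception e) {
        // "{}" is filled with deviceIds; the trailing Throwable gets its stack trace printed,
        // but only when the logger is enabled for DEBUG.
        log.debug("get his data error {} ", deviceIds, e);
    }
}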
View File

@@ -12,12 +12,10 @@ import cn.lihongjie.coal.file.entity.FileEntity;
import cn.lihongjie.coal.file.mapper.FileMapper;
import cn.lihongjie.coal.file.repository.FileRepository;
import cn.lihongjie.coal.spring.config.AliyunProperty;
import cn.lihongjie.coal.spring.config.HwCloudProperty;
import com.aliyun.oss.OSSClient;
import com.aliyun.oss.model.OSSObject;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.obs.services.ObsClient;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@@ -43,8 +41,6 @@ import java.util.UUID;
@Slf4j
@Transactional
public class FileService extends BaseService<FileEntity, FileRepository> {
@Autowired ObsClient obsClient;
@Autowired HwCloudProperty hwCloudProperty;
@Autowired AliyunProperty aliyunProperty;

View File

@@ -1,6 +1,5 @@
package cn.lihongjie.coal.ip;
import jakarta.annotation.PostConstruct;
import lombok.SneakyThrows;
@@ -20,8 +19,7 @@ import java.net.InetAddress;
@Service
@Slf4j
@Transactional
public
class IpQueryService {
public class IpQueryService {
@Autowired Environment environment;
@@ -30,6 +28,7 @@ class IpQueryService {
@Value("classpath:ip/ip2region.xdb")
Resource dbFile;
private Searcher searcher;
private static boolean isPrivateAddress(InetAddress inetAddress) {
@@ -37,21 +36,19 @@ class IpQueryService {
int byte1 = addressBytes[0] & 0xff;
int byte2 = addressBytes[1] & 0xff;
return (byte1 == 10) ||
(byte1 == 172 && (byte2 >= 16 && byte2 <= 31)) ||
(byte1 == 192 && byte2 == 168);
}
@SneakyThrows
public static void main(String[] args){
System.out.println(isPrivateAddress(InetAddress.getByName("192.168.0.1")));
return (byte1 == 10)
|| (byte1 == 172 && (byte2 >= 16 && byte2 <= 31))
|| (byte1 == 192 && byte2 == 168);
}
@SneakyThrows
@PostConstruct
public void init() {
// initDB();
}
private void initDB() {
// 2. Create a fully in-memory query object from the cBuff above.
try {
searcher = Searcher.newWithBuffer(dbFile.getContentAsByteArray());
@@ -65,6 +62,16 @@ class IpQueryService {
@SneakyThrows
public String query(String ipaddr) {
if (searcher == null) {
synchronized (this) {
if (searcher == null) {
initDB();
}
}
}
if (searcher != null) {
InetAddress ipAddress = InetAddress.getByName(ipaddr);
if (ipAddress.isAnyLocalAddress()) {
@@ -77,7 +84,6 @@ class IpQueryService {
return "内网地址";
}
try {
String search = searcher.search(ipaddr);

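The query() change above lazily initializes the searcher behind a double-checked lock. The field shown in the diff is not declared volatile; the classic form of this pattern is only guaranteed safe when the checked field is volatile (or the reference is otherwise safely published). A sketch of the volatile variant, assuming ip2region's xdb Searcher API and a caller-supplied byte[]:

import org.lionsoul.ip2region.xdb.Searcher;

// Sketch only: lazy, thread-safe creation of an in-memory Searcher.
// The byte[] source (the ip2region.xdb content) is assumed to be supplied by the caller.
class LazySearcher {
    private volatile Searcher searcher;

    Searcher get(byte[] xdbBytes) throws Exception {
        if (searcher == null) {                 // fast path, no lock
            synchronized (this) {
                if (searcher == null) {         // re-check while holding the lock
                    searcher = Searcher.newWithBuffer(xdbBytes);
                }
            }
        }
        return searcher;
    }
}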
View File

@@ -17,7 +17,7 @@ public interface JobPostRepository extends BaseRepository<JobPostEntity> {
@Query(
"""
select e.jobPost.id as id, array_agg(e.id) as empIds from EmployeeEntity e
select e.jobPost.id as id, array_agg(e.id) within group(order by e.id) as empIds from EmployeeEntity e
where e.jobPost.id in :ids
group by e.jobPost.id
""")

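The repository change adds an explicit ordering to the aggregated employee ids. In plain PostgreSQL the same ordering is usually written array_agg(e.id ORDER BY e.id); the within group (order by ...) spelling is the ordered-set-aggregate syntax used in the HQL here. A hypothetical interface projection for the id/empIds aliases (not part of this commit; the exact Java type of the aggregated ids depends on how Hibernate maps the array):

// Hypothetical Spring Data interface projection matching the aliases selected above.
public interface JobPostEmpIdsView {
    Long getId();        // the "id" alias
    Long[] getEmpIds();  // the "empIds" alias; may also surface as List<Long> depending on mapping
}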
View File

@@ -4,15 +4,14 @@ import com.obs.services.ObsClient;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.stereotype.Component;
@Component
//@Component
@Slf4j
public class HwCloudConfig {
@Autowired HwCloudProperty hwCloudProperty;
// @Autowired
HwCloudProperty hwCloudProperty;
@Bean
public ObsClient obsClient() {

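Commenting out @Component removes HwCloudConfig (and its ObsClient bean) from the context, which matches the ObsClient/HwCloudProperty fields deleted from FileService above. If the integration should stay toggleable instead of being hard-disabled, a property-driven condition is a common alternative; a sketch assuming a hwcloud.enabled flag and getter names on HwCloudProperty (both assumptions):

import com.obs.services.ObsClient;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// Sketch: only register the OBS client when hwcloud.enabled=true.
// Property name and HwCloudProperty getters are assumptions, not taken from this commit.
@Configuration
@ConditionalOnProperty(name = "hwcloud.enabled", havingValue = "true")
public class HwCloudConditionalConfig {

    @Bean
    public ObsClient obsClient(HwCloudProperty hwCloudProperty) {
        return new ObsClient(
                hwCloudProperty.getAccessKey(),
                hwCloudProperty.getSecretKey(),
                hwCloudProperty.getEndpoint());
    }
}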
View File

@@ -1,15 +1,34 @@
package cn.lihongjie.coal.spring.config;
import static org.hibernate.type.SqlTypes.VARCHAR;
import cn.lihongjie.coal.annotation.HyperTable;
import com.fasterxml.jackson.databind.JsonNode;
import io.hypersistence.utils.hibernate.type.json.internal.JsonBinaryJdbcTypeDescriptor;
import io.hypersistence.utils.hibernate.type.json.internal.JsonNodeJavaTypeDescriptor;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.FunctionContributions;
import org.hibernate.boot.model.TypeContributions;
import org.hibernate.boot.model.relational.SqlStringGenerationContext;
import org.hibernate.dialect.PostgreSQLDialect;
import org.hibernate.engine.jdbc.Size;
import org.hibernate.mapping.ForeignKey;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.mapping.Table;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.spi.Exporter;
import org.hibernate.type.BasicTypeRegistry;
import org.hibernate.type.SqlTypes;
import org.hibernate.type.Type;
import org.hibernate.type.descriptor.java.JavaType;
import org.hibernate.type.descriptor.jdbc.JdbcType;
import org.hibernate.type.descriptor.sql.DdlType;
import org.hibernate.type.descriptor.sql.internal.DdlTypeImpl;
import org.hibernate.type.descriptor.sql.spi.DdlTypeRegistry;
import org.hibernate.type.internal.BasicTypeImpl;
import java.util.Arrays;
import java.util.Optional;
@@ -17,6 +36,40 @@ import java.util.stream.Collectors;
public class MyPostgreSQLDialect extends PostgreSQLDialect {
@Override
protected void registerColumnTypes(TypeContributions typeContributions, ServiceRegistry serviceRegistry) {
super.registerColumnTypes(typeContributions, serviceRegistry);
DdlTypeRegistry ddlTypeRegistry = typeContributions
.getTypeConfiguration()
.getDdlTypeRegistry();
BasicTypeRegistry basicTypeRegistry = typeContributions
.getTypeConfiguration()
.getBasicTypeRegistry();
// Hibernate auto DDL: override the varchar column type with text
ddlTypeRegistry.addDescriptor(new DdlTypeImpl(VARCHAR, "text", this));
// Hibernate auto DDL: register the jsonb column type
ddlTypeRegistry.addDescriptor(new OtherDdlType());
// Register the JsonNode Java type
typeContributions.getTypeConfiguration().getJavaTypeRegistry().addDescriptor(JsonNodeJavaTypeDescriptor.INSTANCE);
// Register the JsonNode JDBC type
typeContributions.getTypeConfiguration().getJdbcTypeRegistry().addDescriptor(JsonBinaryJdbcTypeDescriptor.INSTANCE);
// Register the mapping from the JsonNode Java type to the JDBC type
basicTypeRegistry
.register(
new BasicTypeImpl<>(
JsonNodeJavaTypeDescriptor.INSTANCE,
JsonBinaryJdbcTypeDescriptor.INSTANCE), JsonNode.class.getCanonicalName());
}
@Override
public void initializeFunctionRegistry(FunctionContributions functionContributions) {
super.initializeFunctionRegistry(functionContributions);
@@ -25,21 +78,62 @@ public class MyPostgreSQLDialect extends PostgreSQLDialect {
}
@Override
protected String columnType(int sqlTypeCode) {
if (sqlTypeCode == SqlTypes.VARCHAR) {
return "text";
}
return super.columnType(sqlTypeCode);
public Exporter<ForeignKey> getForeignKeyExporter() {
return new Exporter<ForeignKey>() {
@Override
public String[] getSqlCreateStrings(
ForeignKey exportable, Metadata metadata, SqlStringGenerationContext context) {
return new String[0];
}
@Override
public String[] getSqlDropStrings(
ForeignKey exportable, Metadata metadata, SqlStringGenerationContext context) {
return new String[0];
}
};
}
@Override
public String getAddForeignKeyConstraintString(
String constraintName,
String[] foreignKey,
String referencedTable,
String[] primaryKey,
boolean referencesPrimaryKey) {
return " DROP CONSTRAINT IF EXISTS notexist ";
public static class OtherDdlType implements DdlType {
@Override
public int getSqlTypeCode() {
return SqlTypes.OTHER;
}
@Override
public String getRawTypeName() {
return "";
}
@Override
public String getTypeName(Long size, Integer precision, Integer scale) {
return "";
}
@Override
public String getTypeName(Size columnSize, Type type, DdlTypeRegistry ddlTypeRegistry) {
Class<?> returnedClass = type.getReturnedClass();
if (returnedClass.isAssignableFrom(JsonNode.class)) {
return "jsonb";
}
return null;
}
@Override
public String getCastTypeName(JdbcType jdbcType, JavaType<?> javaType) {
return "";
}
@Override
public String getCastTypeName(JdbcType jdbcType, JavaType<?> javaType, Long length, Integer precision, Integer scale) {
return "";
}
}
@Override
@@ -107,9 +201,5 @@ public class MyPostgreSQLDialect extends PostgreSQLDialect {
};
}
@Override
public String getAddForeignKeyConstraintString(
String constraintName, String foreignKeyDefinition) {
return " DROP CONSTRAINT IF EXISTS notexist ";
}
}

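With the JsonNode Java type, the binary JSON JDBC type, and the OtherDdlType registered, a plain JsonNode field can be persisted as a jsonb column and emitted correctly by schema update. A hypothetical entity relying on that registration (names are illustrative only):

import com.fasterxml.jackson.databind.JsonNode;
import jakarta.persistence.Entity;
import jakarta.persistence.Id;

// Hypothetical entity: the dialect above resolves JsonNode to the registered
// basic type, and the custom DdlType makes ddl-auto emit "jsonb" for the column.
@Entity
public class ExampleDocumentEntity {

    @Id
    private Long id;

    private JsonNode payload;

    public Long getId() { return id; }
    public JsonNode getPayload() { return payload; }
    public void setPayload(JsonNode payload) { this.payload = payload; }
}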
View File

@@ -31,6 +31,8 @@ public class QuartzConfig {
public void customize(SchedulerFactoryBean schedulerFactoryBean) {
if (dataSource instanceof P6DataSource p6DataSource) {
// schedulerFactoryBean.setDataSource(dataSource);
schedulerFactoryBean.setDataSource(p6DataSource.unwrap(DataSource.class));
} else {

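The customizer hands Quartz the DataSource unwrapped from the p6spy P6DataSource proxy, so the job store talks to the underlying pool directly while the rest of the application keeps the spied wrapper. A sketch of the surrounding customizer shape (the class name and the fallback branch are assumptions; only the instanceof/unwrap lines mirror the diff):

import com.p6spy.engine.spy.P6DataSource;

import org.springframework.boot.autoconfigure.quartz.SchedulerFactoryBeanCustomizer;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;

import javax.sql.DataSource;
import java.sql.SQLException;

// Sketch of a SchedulerFactoryBeanCustomizer around the lines shown in the diff.
public class QuartzDataSourceCustomizer implements SchedulerFactoryBeanCustomizer {

    private final DataSource dataSource;

    public QuartzDataSourceCustomizer(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    @Override
    public void customize(SchedulerFactoryBean schedulerFactoryBean) {
        if (dataSource instanceof P6DataSource p6DataSource) {
            try {
                // java.sql.Wrapper#unwrap reaches the real pooled DataSource behind p6spy.
                schedulerFactoryBean.setDataSource(p6DataSource.unwrap(DataSource.class));
            } catch (SQLException e) {
                schedulerFactoryBean.setDataSource(dataSource);
            }
        } else {
            schedulerFactoryBean.setDataSource(dataSource);
        }
    }
}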
View File

@@ -1,6 +1,7 @@
package cn.lihongjie.coal.spring.config;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
@@ -10,4 +11,6 @@ import org.springframework.data.redis.connection.RedisConnectionFactory;
public class RedisConfig {
@Autowired RedisConnectionFactory redisConnectionFactory;
}

View File

@@ -62,8 +62,11 @@ spring:
devtools:
restart:
enabled: true
livereload:
enabled: false
jpa:
hibernate:
ddl-auto: update