Compare commits
6 Commits
ccad35f169...f4020e2793
| Author | SHA1 | Date |
|---|---|---|
| | f4020e2793 | 3 months ago |
| | 7c56e7cd38 | 3 months ago |
| | 12bcc9f503 | 3 months ago |
| | 916954454c | 3 months ago |
| | 67f2738851 | 3 months ago |
| | 883925c287 | 3 months ago |
@@ -0,0 +1,20 @@
package com.pjilisense.flxai.dao;

import com.pjilisense.flxai.base.dao.BaseDao;
import com.pjilisense.flxai.entity.ChunksEntity;
import org.apache.ibatis.annotations.Mapper;

import java.util.List;
import java.util.Map;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
@Mapper
public interface ChunksDao extends BaseDao<ChunksEntity> {
    List<Map<String,Object>> queryCsv(Map<String, Object> map);

}
@@ -0,0 +1,16 @@
package com.pjilisense.flxai.dao;

import com.pjilisense.flxai.base.dao.BaseDao;
import com.pjilisense.flxai.entity.DigitalImgEntity;
import org.apache.ibatis.annotations.Mapper;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-26
 */
@Mapper
public interface DigitalImgDao extends BaseDao<DigitalImgEntity> {

}
@@ -0,0 +1,16 @@
package com.pjilisense.flxai.dao;

import com.pjilisense.flxai.base.dao.BaseDao;
import com.pjilisense.flxai.entity.FileChunksEntity;
import org.apache.ibatis.annotations.Mapper;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
@Mapper
public interface FileChunksDao extends BaseDao<FileChunksEntity> {

}
@@ -0,0 +1,16 @@
package com.pjilisense.flxai.dao;

import com.pjilisense.flxai.base.dao.BaseDao;
import com.pjilisense.flxai.entity.GlobalFilesEntity;
import org.apache.ibatis.annotations.Mapper;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
@Mapper
public interface GlobalFilesDao extends BaseDao<GlobalFilesEntity> {

}
@@ -0,0 +1,16 @@
package com.pjilisense.flxai.dao;

import com.pjilisense.flxai.base.dao.BaseDao;
import com.pjilisense.flxai.entity.MyDigitalHumanEntity;
import org.apache.ibatis.annotations.Mapper;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-26
 */
@Mapper
public interface MyDigitalHumanDao extends BaseDao<MyDigitalHumanEntity> {

}
@@ -0,0 +1,64 @@
package com.pjilisense.flxai.dto;

import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import java.io.Serializable;
import java.time.OffsetDateTime;
import java.util.Date;
import java.util.Map;
import java.util.UUID;


/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
@Data
@ApiModel(value = "${comments}")
public class ChunksDTO implements Serializable {
    private static final long serialVersionUID = 1L;

    @ApiModelProperty(value = "$column.comments")
    private UUID id;

    @ApiModelProperty(value = "$column.comments")
    private String text;

    @ApiModelProperty(value = "$column.comments")
    private String abstractx;

    @ApiModelProperty(value = "$column.comments")
    private Map<String,Object> metadata;

    @ApiModelProperty(value = "$column.comments")
    private Integer index;

    @ApiModelProperty(value = "$column.comments")
    private String type;

    @ApiModelProperty(value = "$column.comments")
    @JsonSerialize(using = com.pjilisense.flxai.wrapper.OffsetDateTimeSerializer.class)
    @JsonDeserialize(using = com.pjilisense.flxai.wrapper.OffsetDateTimeDeserializer.class)
    private OffsetDateTime createdAt;

    @ApiModelProperty(value = "$column.comments")
    @JsonSerialize(using = com.pjilisense.flxai.wrapper.OffsetDateTimeSerializer.class)
    @JsonDeserialize(using = com.pjilisense.flxai.wrapper.OffsetDateTimeDeserializer.class)
    private OffsetDateTime updatedAt;

    @ApiModelProperty(value = "$column.comments")
    private String userId;

    @ApiModelProperty(value = "$column.comments")
    @JsonSerialize(using = com.pjilisense.flxai.wrapper.OffsetDateTimeSerializer.class)
    @JsonDeserialize(using = com.pjilisense.flxai.wrapper.OffsetDateTimeDeserializer.class)
    private OffsetDateTime accessedAt;


}
@@ -0,0 +1,35 @@
package com.pjilisense.flxai.dto;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import java.io.Serializable;
import java.util.Date;


/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-26
 */
@Data
@ApiModel(value = "${comments}")
public class DigitalImgDTO implements Serializable {
    private static final long serialVersionUID = 1L;

    @ApiModelProperty(value = "主键")
    private String id;

    @ApiModelProperty(value = "名称")
    private String name;

    @ApiModelProperty(value = "图片存放的相对路径")
    private String imgPath;

    @ApiModelProperty(value = "图片类型1数字人形象2数字人场景3数字人背景")
    private String imgType;


}
@@ -0,0 +1,38 @@
package com.pjilisense.flxai.dto;

import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import java.io.Serializable;
import java.time.OffsetDateTime;
import java.util.Date;
import java.util.UUID;


/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
@Data
@ApiModel(value = "${comments}")
public class FileChunksDTO implements Serializable {
    private static final long serialVersionUID = 1L;

    @ApiModelProperty(value = "$column.comments")
    private String fileId;

    @ApiModelProperty(value = "$column.comments")
    private UUID chunkId;

    @ApiModelProperty(value = "$column.comments")
    @JsonSerialize(using = com.pjilisense.flxai.wrapper.OffsetDateTimeSerializer.class)
    @JsonDeserialize(using = com.pjilisense.flxai.wrapper.OffsetDateTimeDeserializer.class)
    private OffsetDateTime createdAt;


}
@@ -0,0 +1,52 @@
package com.pjilisense.flxai.dto;

import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import java.io.Serializable;
import java.time.OffsetDateTime;
import java.util.Date;
import java.util.Map;


/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
@Data
@ApiModel(value = "${comments}")
public class GlobalFilesDTO implements Serializable {
    private static final long serialVersionUID = 1L;

    @ApiModelProperty(value = "$column.comments")
    private String hashId;

    @ApiModelProperty(value = "$column.comments")
    private String fileType;

    @ApiModelProperty(value = "$column.comments")
    private Integer size;

    @ApiModelProperty(value = "$column.comments")
    private String url;

    @ApiModelProperty(value = "$column.comments")
    private Map<String,Object> metadata;

    @ApiModelProperty(value = "$column.comments")
    @JsonSerialize(using = com.pjilisense.flxai.wrapper.OffsetDateTimeSerializer.class)
    @JsonDeserialize(using = com.pjilisense.flxai.wrapper.OffsetDateTimeDeserializer.class)
    private OffsetDateTime createdAt;

    @ApiModelProperty(value = "$column.comments")
    @JsonSerialize(using = com.pjilisense.flxai.wrapper.OffsetDateTimeSerializer.class)
    @JsonDeserialize(using = com.pjilisense.flxai.wrapper.OffsetDateTimeDeserializer.class)
    private OffsetDateTime accessedAt;


}
@@ -0,0 +1,50 @@
package com.pjilisense.flxai.dto;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;

import java.io.Serializable;
import java.util.Date;


/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-27
 */
@Data
@ApiModel(value = "${comments}")
public class MyDigitalHumanDTO implements Serializable {
    private static final long serialVersionUID = 1L;

    @ApiModelProperty(value = "主键")
    private String id;

    @ApiModelProperty(value = "用户表ID")
    private String userid;

    @ApiModelProperty(value = "数字形象ID数字图片表ID")
    private String imageid;

    @ApiModelProperty(value = "场景ID数字图片表ID")
    private String sceneimgid;

    @ApiModelProperty(value = "背景ID数字图片表ID")
    private String bkimgid;

    @ApiModelProperty(value = "选择的声音ID")
    private String voiceid;

    @ApiModelProperty(value = "编辑后的图片")
    private String videoimg;

    @ApiModelProperty(value = "视频存放目录")
    private String videodir;

    @ApiModelProperty(value = "声音存放目录")
    private String voicedir;


}
@@ -0,0 +1,67 @@
package com.pjilisense.flxai.entity;

import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import com.pjilisense.flxai.handle.JsonbTypeHandler;
import com.pjilisense.flxai.handle.UUIDTypeHandler;
import lombok.Data;

import java.time.OffsetDateTime;
import java.util.Date;
import java.util.Map;
import java.util.UUID;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
@Data
@TableName("chunks")
public class ChunksEntity {

    /**
     * $column.comments
     */
    @TableField(typeHandler = UUIDTypeHandler.class)
    private UUID id;
    /**
     * $column.comments
     */
    private String text;
    /**
     * $column.comments
     */
    @TableField(value="abstract")
    private String abstractx;
    /**
     * $column.comments
     */
    @TableField(value = "metadata", typeHandler = JsonbTypeHandler.class)
    private Map<String,Object> metadata;
    /**
     * $column.comments
     */
    private Integer index;
    /**
     * $column.comments
     */
    private String type;
    /**
     * $column.comments
     */
    private OffsetDateTime createdAt;
    /**
     * $column.comments
     */
    private OffsetDateTime updatedAt;
    /**
     * $column.comments
     */
    private String userId;
    /**
     * $column.comments
     */
    private OffsetDateTime accessedAt;
}
@@ -0,0 +1,34 @@
package com.pjilisense.flxai.entity;

import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;

import java.util.Date;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-26
 */
@Data
@TableName("digital_img")
public class DigitalImgEntity {

    /**
     * Primary key
     */
    private String id;
    /**
     * Name
     */
    private String name;
    /**
     * Relative path where the image is stored
     */
    private String imgPath;
    /**
     * Image type: 1 = digital-human avatar, 2 = digital-human scene, 3 = digital-human background
     */
    private String imgType;
}
@@ -0,0 +1,35 @@
package com.pjilisense.flxai.entity;

import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import com.pjilisense.flxai.handle.UUIDTypeHandler;
import lombok.Data;

import java.time.OffsetDateTime;
import java.util.Date;
import java.util.UUID;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
@Data
@TableName("file_chunks")
public class FileChunksEntity {

    /**
     * $column.comments
     */
    private String fileId;
    /**
     * $column.comments
     */
    @TableField(typeHandler = UUIDTypeHandler.class)
    private UUID chunkId;
    /**
     * $column.comments
     */
    private OffsetDateTime createdAt;
}
@@ -0,0 +1,51 @@
package com.pjilisense.flxai.entity;

import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import com.pjilisense.flxai.handle.JsonbTypeHandler;
import lombok.Data;

import java.time.OffsetDateTime;
import java.util.Date;
import java.util.Map;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
@Data
@TableName("global_files")
public class GlobalFilesEntity {

    /**
     * $column.comments
     */
    private String hashId;
    /**
     * $column.comments
     */
    private String fileType;
    /**
     * $column.comments
     */
    private Integer size;
    /**
     * $column.comments
     */
    private String url;
    /**
     * $column.comments
     */
    @TableField(value = "metadata", typeHandler = JsonbTypeHandler.class)
    private Map<String,Object> metadata;
    /**
     * $column.comments
     */
    private OffsetDateTime createdAt;
    /**
     * $column.comments
     */
    private OffsetDateTime accessedAt;
}
@@ -0,0 +1,54 @@
package com.pjilisense.flxai.entity;

import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;

import java.util.Date;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-27
 */
@Data
@TableName("my_digital_human")
public class MyDigitalHumanEntity {

    /**
     * Primary key
     */
    private String id;
    /**
     * User table ID
     */
    private String userid;
    /**
     * Digital avatar ID (ID in the digital image table)
     */
    private String imageid;
    /**
     * Scene ID (ID in the digital image table)
     */
    private String sceneimgid;
    /**
     * Background ID (ID in the digital image table)
     */
    private String bkimgid;
    /**
     * Selected voice ID
     */
    private String voiceid;
    /**
     * Edited image
     */
    private String videoimg;
    /**
     * Directory where the video is stored
     */
    private String videodir;
    /**
     * Directory where the voice file is stored
     */
    private String voicedir;
}
@@ -0,0 +1,19 @@
package com.pjilisense.flxai.service;

import com.pjilisense.flxai.base.service.CrudService;
import com.pjilisense.flxai.dto.ChunksDTO;
import com.pjilisense.flxai.entity.ChunksEntity;

import java.util.List;
import java.util.Map;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
public interface ChunksService extends CrudService<ChunksEntity, ChunksDTO> {

    List<Map<String,Object>> search(String userid,String text);
}
@@ -0,0 +1,15 @@
package com.pjilisense.flxai.service;

import com.pjilisense.flxai.base.service.CrudService;
import com.pjilisense.flxai.dto.DigitalImgDTO;
import com.pjilisense.flxai.entity.DigitalImgEntity;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-26
 */
public interface DigitalImgService extends CrudService<DigitalImgEntity, DigitalImgDTO> {

}
@@ -0,0 +1,15 @@
package com.pjilisense.flxai.service;

import com.pjilisense.flxai.base.service.CrudService;
import com.pjilisense.flxai.dto.FileChunksDTO;
import com.pjilisense.flxai.entity.FileChunksEntity;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
public interface FileChunksService extends CrudService<FileChunksEntity, FileChunksDTO> {

}
@@ -0,0 +1,15 @@
package com.pjilisense.flxai.service;

import com.pjilisense.flxai.base.service.CrudService;
import com.pjilisense.flxai.dto.GlobalFilesDTO;
import com.pjilisense.flxai.entity.GlobalFilesEntity;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
public interface GlobalFilesService extends CrudService<GlobalFilesEntity, GlobalFilesDTO> {

}
@@ -0,0 +1,15 @@
package com.pjilisense.flxai.service;

import com.pjilisense.flxai.base.service.CrudService;
import com.pjilisense.flxai.dto.MyDigitalHumanDTO;
import com.pjilisense.flxai.entity.MyDigitalHumanEntity;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-26
 */
public interface MyDigitalHumanService extends CrudService<MyDigitalHumanEntity, MyDigitalHumanDTO> {

}
@@ -0,0 +1,296 @@
package com.pjilisense.flxai.service.impl;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.hankcs.hanlp.HanLP;
import com.hankcs.hanlp.seg.Segment;
import com.hankcs.hanlp.seg.common.Term;
import com.pjilisense.flxai.base.service.impl.CrudServiceImpl;
import com.pjilisense.flxai.dao.ChunksDao;
import com.pjilisense.flxai.dto.ChunksDTO;
import com.pjilisense.flxai.entity.ChunksEntity;
import com.pjilisense.flxai.service.ChunksService;
import cn.hutool.core.util.StrUtil;
import org.springframework.stereotype.Service;

import java.math.BigDecimal;
import java.util.*;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
@Service
public class ChunksServiceImpl extends CrudServiceImpl<ChunksDao, ChunksEntity, ChunksDTO> implements ChunksService {

    @Override
    public QueryWrapper<ChunksEntity> getWrapper(Map<String, Object> params){
        String id = (String)params.get("id");

        QueryWrapper<ChunksEntity> wrapper = new QueryWrapper<>();
        wrapper.eq(StrUtil.isNotBlank(id), "id", id);

        return wrapper;
    }


    @Override
    public List<Map<String, Object>> search(String userid,String keyText) {
        return search1( userid, keyText);
    }

    private List<Map<String, Object>> search1(String userid,String keyText) {
        Segment seg= HanLP.newSegment();
        seg.enableCustomDictionary(false);
        List<Term> lst = seg.seg(keyText);
        ArrayList<String> listWord=processTermList(lst);
        //ArrayList<String> listSentence=processTermListx(lst);
        LinkedHashSet<String> set = new LinkedHashSet<>();
        set.addAll(listWord);
        //set.addAll(listSentence);
        listWord.clear();
        //listSentence.clear();
        ArrayList<String> wordList=processTermList(lst);
        wordList.addAll(set);
        set.clear();
        HashMap<String,Object> map = new HashMap<>();
        map.put("userid",userid);
        map.put("wordList",wordList);
        List<Map<String,Object>> list = baseDao.queryCsv(map);
        List<Map<String,Object>> listRet = new ArrayList<>() ;
        if(list!=null && list.size()>0) {
            ArrayList<String> listText=processTermListN0Com(lst);
            List<String> idList = new ArrayList<>();
            for (Map<String, Object> mp: list){
                String ttext = String.valueOf(mp.get("text"));
                int pos = ttext.indexOf(',');
                String question = ttext.substring(0,pos);
                String answer = ttext.substring(pos+1);
                mp.put("question",question);
                mp.put("answer",answer);
                List<Term> lst2 = seg.seg(question);
                ArrayList<String> listQuestion=processTermListN0Com(lst2);
                double similarity =calculateSimilarity(listText, listQuestion);
                BigDecimal bd = new BigDecimal(similarity);
                if(bd.compareTo(BigDecimal.ZERO.stripTrailingZeros())==0){
                    idList.add(String.valueOf(mp.get("id")));
                } else {
                    System.out.println(bd.toString());
                    mp.put("similarity",bd.toString());
                }
            }
            for(Map<String, Object> mp: list){
                String id=String.valueOf(mp.get("id"));
                if(!idList.contains(id)){
                    listRet.add(mp);
                }
            }
            list.clear();
            list.addAll(listRet);
            Collections.sort(list, new Comparator<Map<String, Object>>() {
                @Override
                public int compare(Map<String, Object> o1, Map<String, Object> o2) {
                    BigDecimal bd1 = new BigDecimal(String.valueOf(o1.get("similarity")));
                    BigDecimal bd2 = new BigDecimal(String.valueOf(o2.get("similarity")));
                    return bd1.compareTo(bd2)*(-1);
                }
            });
        }
        return list;
    }
    private List<Map<String, Object>> search2(String userid,String keyText) {
        Segment seg= HanLP.newSegment();
        seg.enableCustomDictionary(false);
        List<Term> lst = seg.seg(keyText);
        ArrayList<String> listWord=processTermList(lst);
        LinkedHashSet<String> set = new LinkedHashSet<>();
        set.addAll(listWord);
        listWord.clear();
        ArrayList<String> wordList=processTermList(lst);
        wordList.addAll(set);
        set.clear();
        HashMap<String,Object> map = new HashMap<>();
        map.put("userid",userid);
        map.put("wordList",wordList);
        List<Map<String,Object>> list = baseDao.queryCsv(map);
        List<Map<String,Object>> listRet = new ArrayList<>() ;
        if(list!=null && list.size()>0) {
            ArrayList<String> listText=processTermListN0Com(lst);
            List<String> idList = new ArrayList<>();
            for (Map<String, Object> mp: list){
                String ttext = String.valueOf(mp.get("text"));
                List<Term> lst2 = seg.seg(ttext);
                ArrayList<String> listQuestion=processTermListN0Com(lst2);
                double similarity =calculateSimilarity(listText, listQuestion);
                BigDecimal bd = new BigDecimal(similarity);
                if(bd.compareTo(BigDecimal.ZERO.stripTrailingZeros())==0){
                    idList.add(String.valueOf(mp.get("id")));
                } else {
                    System.out.println(bd.toString());
                    mp.put("similarity",bd.toString());
                }
            }
            for(Map<String, Object> mp: list){
                String id=String.valueOf(mp.get("id"));
                if(!idList.contains(id)){
                    listRet.add(mp);
                }
            }
            list.clear();
            list.addAll(listRet);
            Collections.sort(list, new Comparator<Map<String, Object>>() {
                @Override
                public int compare(Map<String, Object> o1, Map<String, Object> o2) {
                    BigDecimal bd1 = new BigDecimal(String.valueOf(o1.get("similarity")));
                    BigDecimal bd2 = new BigDecimal(String.valueOf(o2.get("similarity")));
                    return bd1.compareTo(bd2)*(-1);
                }
            });
        }
        return list;
    }

    private static ArrayList<String> processTermList(List<Term> lst) {
        ArrayList<String> ret = new ArrayList<>();
        StringBuilder sd = new StringBuilder();
        for(int i=0;i<lst.size();i++) {
            Term term = lst.get(i);
            if(term.nature.startsWith('w')) {
                if(sd.length()>0) {
                    ret.add(sd.toString());
                    sd = sd.delete(0,sd.length());
                }
                continue;
            }else {
                String word = term.word;
                if(word.length()==1) {
                    sd.append(word);
                    if(i+1< lst.size()) {
                        Term termx = lst.get(i+1);
                        if(!termx.nature.startsWith('w')) {
                            String wordx = termx.word;
                            sd.append(wordx);
                            if(wordx.length()>1) {
                                ret.add(sd.toString());
                                sd = sd.delete(0,sd.length());
                            }
                            i++;
                        }
                    }
                } else {
                    sd.append(word);
                    ret.add(sd.toString());
                    sd = sd.delete(0,sd.length());
                }
            }

        }
//        System.out.println("ret=");
//        for(String term : ret) {
//            System.out.println(term);
//        }
        return ret;
    }

    private static ArrayList<String> processTermListN0Com(List<Term> lst) {
        ArrayList<String> ret = new ArrayList<>();
        StringBuilder sd = new StringBuilder();
        for(int i=0;i<lst.size();i++) {
            Term term = lst.get(i);
            if(term.nature.startsWith('w')) {
                if(sd.length()>0) {
                    ret.add(sd.toString());
                    sd = sd.delete(0,sd.length());
                }
                continue;
            }else {
                String word = term.word;
                if(word.length()==1) {
                    sd.append(word);
                    if(i+1< lst.size()) {
                        Term termx = lst.get(i+1);
                        if(!termx.nature.startsWith('w')) {
                            String wordx = termx.word;
                            sd.append(wordx);
                            if(wordx.length()>0) {
                                ret.add(sd.toString());
                                sd = sd.delete(0,sd.length());
                            }
                            i++;
                        }
                    }
                } else {
                    sd.append(word);
                    ret.add(sd.toString());
                    sd = sd.delete(0,sd.length());
                }
            }

        }
//        System.out.println("ret=");
//        for(String term : ret) {
//            System.out.println(term);
//        }
        return ret;
    }
    private static ArrayList<String> processTermListx(List<Term> lst) {
        ArrayList<String> ret = new ArrayList<>();
        StringBuilder sd = new StringBuilder();
        for(int i=0;i<lst.size();i++) {
            Term term = lst.get(i);
            if(term.nature.startsWith('w')) {
                if(sd.length()>0) {
                    ret.add(sd.toString());
                    sd = sd.delete(0,sd.length());
                }
                continue;
            }else {
                String word = term.word;
                sd.append(word);
            }
        }
//        System.out.println("ret=");
//        for(String term : ret) {
//            System.out.println(term);
//        }
        return ret;
    }

    // Build a word-frequency map
    private Map<String, Integer> createFrequencyMap(List<String> words) {
        Map<String, Integer> freqMap = new HashMap<>();
        for (String word : words) {
            freqMap.put(word, freqMap.getOrDefault(word, 0) + 1);
        }
        return freqMap;
    }

    // Compute cosine similarity between two word lists
    private double calculateSimilarity(List<String> words1, List<String> words2) {
//        List<String> words1 = preprocessText(text1);
//        List<String> words2 = preprocessText(text2);

        Map<String, Integer> freqMap1 = createFrequencyMap(words1);
        Map<String, Integer> freqMap2 = createFrequencyMap(words2);

        double dotProduct = 0.0;
        double magnitude1 = 0.0;
        double magnitude2 = 0.0;

        for (String word : freqMap1.keySet()) {
            int freq1 = freqMap1.get(word);
            magnitude1 += freq1 * freq1;
            if (freqMap2.containsKey(word)) {
                int freq2 = freqMap2.get(word);
                dotProduct += freq1 * freq2;
            }
        }

        for (int freq : freqMap2.values()) {
            magnitude2 += freq * freq;
        }

        return dotProduct / (Math.sqrt(magnitude1) * Math.sqrt(magnitude2));
    }
}
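The ranking above scores each candidate chunk with a bag-of-words cosine similarity over HanLP segments. A minimal, self-contained sketch of that calculation (hypothetical `CosineSimilarityDemo` class, not part of this changeset; it mirrors the `createFrequencyMap`/`calculateSimilarity` pair above):

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class CosineSimilarityDemo {

    // Count how often each word occurs in the list
    static Map<String, Integer> frequencies(List<String> words) {
        Map<String, Integer> freq = new HashMap<>();
        for (String w : words) {
            freq.put(w, freq.getOrDefault(w, 0) + 1);
        }
        return freq;
    }

    // Cosine similarity of two bags of words: dot product / (|a| * |b|)
    static double cosine(List<String> a, List<String> b) {
        Map<String, Integer> fa = frequencies(a);
        Map<String, Integer> fb = frequencies(b);
        double dot = 0, magA = 0, magB = 0;
        for (Map.Entry<String, Integer> e : fa.entrySet()) {
            magA += e.getValue() * e.getValue();
            dot += e.getValue() * fb.getOrDefault(e.getKey(), 0);
        }
        for (int f : fb.values()) {
            magB += f * f;
        }
        return dot / (Math.sqrt(magA) * Math.sqrt(magB));
    }

    public static void main(String[] args) {
        // Query terms vs. a stored question, already segmented into words
        List<String> query = Arrays.asList("数字人", "形象", "上传");
        List<String> stored = Arrays.asList("数字人", "形象", "如何", "上传");
        // 3 shared terms: 3 / (sqrt(3) * sqrt(4)) ≈ 0.87
        System.out.println(cosine(query, stored));
    }
}
```

In `search1`, rows whose score is exactly zero are dropped, and the remainder is sorted in descending order of the `similarity` value.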
@@ -0,0 +1,34 @@
package com.pjilisense.flxai.service.impl;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.pjilisense.flxai.base.service.impl.CrudServiceImpl;
import com.pjilisense.flxai.dao.DigitalImgDao;
import com.pjilisense.flxai.dto.DigitalImgDTO;
import com.pjilisense.flxai.entity.DigitalImgEntity;
import com.pjilisense.flxai.service.DigitalImgService;
import cn.hutool.core.util.StrUtil;
import org.springframework.stereotype.Service;

import java.util.Map;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-26
 */
@Service
public class DigitalImgServiceImpl extends CrudServiceImpl<DigitalImgDao, DigitalImgEntity, DigitalImgDTO> implements DigitalImgService {

    @Override
    public QueryWrapper<DigitalImgEntity> getWrapper(Map<String, Object> params){
        String id = (String)params.get("id");

        QueryWrapper<DigitalImgEntity> wrapper = new QueryWrapper<>();
        wrapper.eq(StrUtil.isNotBlank(id), "id", id);

        return wrapper;
    }


}
@@ -0,0 +1,34 @@
package com.pjilisense.flxai.service.impl;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.pjilisense.flxai.base.service.impl.CrudServiceImpl;
import com.pjilisense.flxai.dao.FileChunksDao;
import com.pjilisense.flxai.dto.FileChunksDTO;
import com.pjilisense.flxai.entity.FileChunksEntity;
import com.pjilisense.flxai.service.FileChunksService;
import cn.hutool.core.util.StrUtil;
import org.springframework.stereotype.Service;

import java.util.Map;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
@Service
public class FileChunksServiceImpl extends CrudServiceImpl<FileChunksDao, FileChunksEntity, FileChunksDTO> implements FileChunksService {

    @Override
    public QueryWrapper<FileChunksEntity> getWrapper(Map<String, Object> params){
        String id = (String)params.get("id");

        QueryWrapper<FileChunksEntity> wrapper = new QueryWrapper<>();
        wrapper.eq(StrUtil.isNotBlank(id), "id", id);

        return wrapper;
    }


}
@@ -0,0 +1,34 @@
package com.pjilisense.flxai.service.impl;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.pjilisense.flxai.base.service.impl.CrudServiceImpl;
import com.pjilisense.flxai.dao.GlobalFilesDao;
import com.pjilisense.flxai.dto.GlobalFilesDTO;
import com.pjilisense.flxai.entity.GlobalFilesEntity;
import com.pjilisense.flxai.service.GlobalFilesService;
import cn.hutool.core.util.StrUtil;
import org.springframework.stereotype.Service;

import java.util.Map;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-10
 */
@Service
public class GlobalFilesServiceImpl extends CrudServiceImpl<GlobalFilesDao, GlobalFilesEntity, GlobalFilesDTO> implements GlobalFilesService {

    @Override
    public QueryWrapper<GlobalFilesEntity> getWrapper(Map<String, Object> params){
        String id = (String)params.get("id");

        QueryWrapper<GlobalFilesEntity> wrapper = new QueryWrapper<>();
        wrapper.eq(StrUtil.isNotBlank(id), "id", id);

        return wrapper;
    }


}
@@ -0,0 +1,34 @@
package com.pjilisense.flxai.service.impl;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.pjilisense.flxai.base.service.impl.CrudServiceImpl;
import com.pjilisense.flxai.dao.MyDigitalHumanDao;
import com.pjilisense.flxai.dto.MyDigitalHumanDTO;
import com.pjilisense.flxai.entity.MyDigitalHumanEntity;
import com.pjilisense.flxai.service.MyDigitalHumanService;
import cn.hutool.core.util.StrUtil;
import org.springframework.stereotype.Service;

import java.util.Map;

/**
 * ${comments}
 *
 * @author liushujing liushujing@philisense.com
 * @since 1.0.0 2025-02-26
 */
@Service
public class MyDigitalHumanServiceImpl extends CrudServiceImpl<MyDigitalHumanDao, MyDigitalHumanEntity, MyDigitalHumanDTO> implements MyDigitalHumanService {

    @Override
    public QueryWrapper<MyDigitalHumanEntity> getWrapper(Map<String, Object> params){
        String id = (String)params.get("id");

        QueryWrapper<MyDigitalHumanEntity> wrapper = new QueryWrapper<>();
        wrapper.eq(StrUtil.isNotBlank(id), "id", id);

        return wrapper;
    }


}
@@ -0,0 +1,146 @@
package com.pjilisense.flxai.utils;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Repository;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.MultipartHttpServletRequest;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
import java.net.URLEncoder;
import java.text.SimpleDateFormat;
import java.util.*;

@Repository
public class FileUtil {

    private final Logger logger = LoggerFactory.getLogger(this.getClass());

    @Value("${file.common.uploadWindow}")
    private String diskPath;
    @Value("${file.common.uploadLinux}")
    private String uploadLinux;

    @Value("${file.common.uploadUrl}")
    private String uploadUrl;

    public String uploadFile(MultipartFile multipartFile){
        String relPath="uploads/";
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMM");
        relPath = relPath+sdf.format(new Date()).substring(0,6)+"/";
        // Full original file name, e.g. spring.jpeg
        String filename = multipartFile.getOriginalFilename();
        // File extension including the dot, e.g. .jpeg
        assert filename != null;
        String suffix = filename.substring(filename.lastIndexOf("."));
        relPath = relPath+UUID.randomUUID().toString().replace("-","")+suffix;
        // Target file
        File descFile = new File(getFilepath(relPath));
        // Make sure the target file's parent directory exists
        if (!descFile.getParentFile().exists()) {
            descFile.getParentFile().mkdirs();
        }
        try (InputStream is = multipartFile.getInputStream();
             BufferedInputStream bis = new BufferedInputStream(is);
             BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(descFile))) {
            int num = 0;
            while ((num = bis.read()) != -1) {
                bos.write(num);
            }
        } catch (Exception e) {
            //log.error(e.getMessage());
            throw new RuntimeException("文件上传错误,请联系管理员");
        }
        return relPath;
    }

    /**
     *
     * @return 1 if the file was deleted, 0 otherwise
     */
    public int delFile(String filepath, String fileName) {
        String rootPath = getFilepath (filepath);
        File file = new File(rootPath);
        if (file.exists()) {
            if(file.isDirectory()) {
                File filex = new File(rootPath+"/"+fileName);
                if(filex.exists()) {
                    filex.delete();
                }
                return 1;
            } else {
                file.delete();
                return 1;
            }
        }
        return 0;
    }

    public void downLoadFile(String filepath, String filename, HttpServletResponse response){
        String rootPath = getFilepath (filepath);
        File file = new File(rootPath);
        if(file.exists()){
            InputStream inputStream = null;
            OutputStream outputStream = null;
            try {
                // Open the stored file as a stream
                if(file.isDirectory()) {
                    inputStream = new FileInputStream(filepath +"/"+ filename);// directory where the file is stored
                } else {
                    inputStream = new FileInputStream(filepath);// filepath may already include the file name
                }
                response.reset();
                response.setContentType("application/octet-stream");
                response.addHeader("Content-Disposition", "attachment; filename=" + URLEncoder.encode(filename, "UTF-8"));
                outputStream = response.getOutputStream();
                byte[] b = new byte[1024];
                int len;
                // Read chunks from the input stream into the buffer; read() returns -1 at end of stream
                while ((len = inputStream.read(b)) > 0) {
                    outputStream.write(b, 0, len);
                }
            } catch (IOException e) {
                //log.error(e.getMessage(), e);
            }finally {
                try {
                    if (inputStream != null) {
                        inputStream.close();
                    }
                    if (outputStream != null) {
                        outputStream.close();
                    }
                } catch (IOException e) {
                    //log.error(e.getMessage(), e);
                }
            }
        }
    }

    public String getFilepath (final String filepath) {// filepath may include the file name
        String rootPath = getRootpath();
        rootPath = rootPath +"/"+ filepath;
        rootPath =rootPath.replace("//","/");
        if(rootPath.endsWith("/")){
            rootPath=rootPath.substring(0,rootPath.length()-1);
        }
        return rootPath;
    }

    public String getRootpath () {
        String rootPath = null;
        if (System.getProperty("os.name").startsWith("Windows")) {
            rootPath = diskPath;
        } else if (System.getProperty("os.name").startsWith("Linux")) {
            rootPath = uploadLinux;
        }
        rootPath =rootPath.replace("//","/");
        if(rootPath.endsWith("/")){
            rootPath=rootPath.substring(0,rootPath.length()-1);
        }
        return rootPath;
    }


}
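A hypothetical usage sketch (assumed test-style caller, not part of these commits) showing how `uploadFile` produces a relative path and how `getFilepath` maps it onto the configured upload root:

```java
import org.springframework.mock.web.MockMultipartFile;

// Assumes FileUtil is a Spring-managed bean so its @Value fields are populated
// from file.common.uploadWindow / file.common.uploadLinux.
public class FileUtilUsageSketch {

    public String storeAvatar(FileUtil fileUtil, byte[] imageBytes) {
        MockMultipartFile upload = new MockMultipartFile(
                "file", "avatar.png", "image/png", imageBytes);

        // Returns a relative path of the form "uploads/202502/<uuid>.png"
        String relPath = fileUtil.uploadFile(upload);

        // Absolute location on disk: OS-specific root + relative path
        return fileUtil.getFilepath(relPath);
    }
}
```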
@@ -0,0 +1,38 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">

<mapper namespace="com.pjilisense.flxai.dao.ChunksDao">

    <resultMap type="com.pjilisense.flxai.entity.ChunksEntity" id="chunksMap">
        <result property="id" column="id" typeHandler="com.pjilisense.flxai.handle.UUIDTypeHandler"/>
        <result property="text" column="text"/>
        <result property="abstractx" column="abstract"/>
        <result property="metadata" column="metadata" typeHandler="com.pjilisense.flxai.handle.JsonbTypeHandler"/>
        <result property="index" column="index"/>
        <result property="type" column="type"/>
        <result property="createdAt" column="created_at" jdbcType="TIMESTAMP_WITH_TIMEZONE" javaType="java.time.OffsetDateTime" />
        <result property="updatedAt" column="updated_at" jdbcType="TIMESTAMP_WITH_TIMEZONE" javaType="java.time.OffsetDateTime" />
        <result property="userId" column="user_id"/>
        <result property="accessedAt" column="accessed_at" jdbcType="TIMESTAMP_WITH_TIMEZONE" javaType="java.time.OffsetDateTime"/>
    </resultMap>

    <resultMap type="java.util.Map" id="BaseResultMap">
        <result property="id" column="id" typeHandler="com.pjilisense.flxai.handle.UUIDTypeHandler"/>
        <result property="text" column="text"/>
        <result property="fid" column="fid"/>
        <result property="fname" column="fname" />
    </resultMap>

    <select id="queryCsv" resultMap="BaseResultMap" parameterType="java.util.Map">
        select a.id ,a."text" ,f.id as fid ,f."name" as fname from chunks a left join file_chunks fc on fc.chunk_id =a.id
        left join files f on f.id =fc.file_id
        where f.file_type ='text/plain' and a.user_id =#{userid}
        <if test="wordList != null">
            AND
            <foreach collection="wordList" item="word" index="index" open="(" close=")" separator="or">
                a."text" like '%${word}%'
            </foreach>
        </if>
    </select>

</mapper>
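The `queryCsv` statement expects a parameter map whose keys match the placeholders above: `userid` binds to `#{userid}` and `wordList` drives the `<foreach>` of LIKE clauses. A minimal sketch of assembling that map (hypothetical caller; same shape as `ChunksServiceImpl.search1`):

```java
import com.pjilisense.flxai.dao.ChunksDao;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class QueryCsvCallSketch {

    public List<Map<String, Object>> run(ChunksDao chunksDao, String userId) {
        List<String> words = new ArrayList<>();
        words.add("数字人");   // each entry becomes: a."text" like '%数字人%'
        words.add("形象");

        Map<String, Object> params = new HashMap<>();
        params.put("userid", userId);   // bound to #{userid}
        params.put("wordList", words);  // iterated by <foreach collection="wordList">

        return chunksDao.queryCsv(params);
    }
}
```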
@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">

<mapper namespace="com.pjilisense.flxai.dao.DigitalImgDao">

    <resultMap type="com.pjilisense.flxai.entity.DigitalImgEntity" id="digitalImgMap">
        <result property="id" column="id"/>
        <result property="name" column="name"/>
        <result property="imgPath" column="img_path"/>
        <result property="imgType" column="img_type"/>
    </resultMap>


</mapper>
@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">

<mapper namespace="com.pjilisense.flxai.dao.FileChunksDao">

    <resultMap type="com.pjilisense.flxai.entity.FileChunksEntity" id="fileChunksMap">
        <result property="fileId" column="file_id"/>
        <result property="chunkId" column="chunk_id" typeHandler="com.pjilisense.flxai.handle.UUIDTypeHandler"/>
        <result property="createdAt" column="created_at" jdbcType="TIMESTAMP_WITH_TIMEZONE" javaType="java.time.OffsetDateTime"/>
    </resultMap>


</mapper>
@@ -0,0 +1,17 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">

<mapper namespace="com.pjilisense.flxai.dao.GlobalFilesDao">

    <resultMap type="com.pjilisense.flxai.entity.GlobalFilesEntity" id="globalFilesMap">
        <result property="hashId" column="hash_id"/>
        <result property="fileType" column="file_type"/>
        <result property="size" column="size"/>
        <result property="url" column="url"/>
        <result property="metadata" column="metadata" typeHandler="com.pjilisense.flxai.handle.JsonbTypeHandler"/>
        <result property="createdAt" column="created_at" jdbcType="TIMESTAMP_WITH_TIMEZONE" javaType="java.time.OffsetDateTime"/>
        <result property="accessedAt" column="accessed_at" jdbcType="TIMESTAMP_WITH_TIMEZONE" javaType="java.time.OffsetDateTime"/>
    </resultMap>


</mapper>
@@ -0,0 +1,29 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">

<mapper namespace="com.pjilisense.flxai.dao.MyDigitalHumanDao">

    <resultMap type="com.pjilisense.flxai.entity.MyDigitalHumanEntity" id="myDigitalHumanMap">
        <result property="id" column="id"/>
        <result property="userid" column="userid"/>
        <result property="imageid" column="imageid"/>
        <result property="sceneimgid" column="sceneimgid"/>
        <result property="bkimgid" column="bkimgid"/>
        <result property="voiceid" column="voiceid"/>
        <result property="videoimg" column="videoimg"/>
        <result property="videodir" column="videodir"/>
        <result property="voicedir" column="voicedir"/>
    </resultMap>
    <resultMap type="java.util.Map" id="BaseResultMap">
        <result property="id" column="id"/>
        <result property="userid" column="userid"/>
        <result property="imageid" column="imageid"/>
        <result property="sceneimgid" column="sceneimgid"/>
        <result property="bkimgid" column="bkimgid"/>
        <result property="voiceid" column="voiceid"/>
        <result property="videoimg" column="videoimg"/>
        <result property="videodir" column="videodir"/>
        <result property="voicedir" column="voicedir"/>
    </resultMap>

</mapper>