去掉logstash tcp配置

代码生成器代码恢复
docker打包问题修复
This commit is contained in:
uhonliu 2020-04-02 14:57:27 +08:00
parent e93b2c4c99
commit e3c724d071
51 changed files with 800 additions and 150 deletions

1
.gitignore vendored
View File

@ -36,7 +36,6 @@ Thumbs.db
Servers
.metadata
upload
generator
temp
# docker ignore

View File

@ -129,8 +129,6 @@ open-platform
<discovery.server-addr>127.0.0.1:8848</discovery.server-addr>
<!--Nacos服务发现命名空间,用于支持多环境.这里必须使用ID不能使用名称,默认为空-->
<discovery.namespace></discovery.namespace>
<!--Logstash日志收集地址-->
<logstash.destination>127.0.0.1:5000</logstash.destination>
```
5. 本地启动(按顺序启动)
@ -155,7 +153,7 @@ open-platform
7. 项目打包部署
+ maven多环境打包,替换变量
``` bash
mvn clean install package -P {dev|test|uat|online}
mvn clean package -P {dev|test|uat|online} -DskipTests
```
+ 项目启动
``` bash
@ -168,13 +166,13 @@ open-platform
+ 配置DOCKER私服仓库
+ maven多环境打包,替换变量.并构建docker镜像
``` bash
clean install package -P {dev|test|uat|online} dockerfile:build
mvn clean package -P {dev|test|uat|online} -DskipTests dockerfile:build dockerfile:push
```
+ 启动docker镜像
```bash
docker run -d -e JAVA_OPTS="-Xms128m -Xmx768m" -p 8233:8233 --name base-server platform/base-server:3.0.0
docker run -d -e JAVA_OPTS="-Xms128m -Xmx768m" -p 8211:8211 --name uaa-admin-server platform/uaa-admin-server:3.0.0
docker run -d -e JAVA_OPTS="-Xms128m -Xmx768m" -p 8888:8888 --name api-spring-server platform/api-spring-server:3.0.0
docker run -d -e JAVA_OPTS="-Xms128m -Xmx768m" -p 8233:8233 --name base-server platform/base-server:3.1.0
docker run -d -e JAVA_OPTS="-Xms128m -Xmx768m" -p 8211:8211 --name uaa-admin-server platform/uaa-admin-server:3.1.0
docker run -d -e JAVA_OPTS="-Xms128m -Xmx768m" -p 8888:8888 --name api-spring-server platform/api-spring-server:3.1.0
```
#### 参考项目及文档

View File

@ -42,10 +42,6 @@
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-bus-amqp</artifactId>
</dependency>
<!--<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-sleuth</artifactId>
</dependency>-->
<!--Redis-->
<dependency>
<groupId>org.springframework.boot</groupId>

75
docs/bin/startup-sw.sh Normal file
View File

@ -0,0 +1,75 @@
#!/usr/bin/env bash
# Service control script that launches a jar with the SkyWalking java agent attached.
# Usage: startup-sw.sh {start|stop|restart|status} <path-to-jar> <skywalking-collector-host:port>
INPUT=$2
SW_SERVER=$3
# Absolute path of the jar; service name is the jar file name without extension.
FILE_PATH=$(readlink -f ${INPUT})
SERVICE=${INPUT##*/}
SERVICE_NAME=${SERVICE%.*}
DEPLOY_DIR=$(pwd)
# G1 GC tuning plus the SkyWalking agent pointed at the collector address.
JVM_OPTS="-server -Xms128m -Xmx768m -XX:+UseG1GC -XX:SurvivorRatio=6 -XX:MaxGCPauseMillis=400 -XX:G1ReservePercent=15 -XX:ParallelGCThreads=4 -XX:ConcGCThreads=1 -XX:InitiatingHeapOccupancyPercent=40 -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -Xloggc:../logs/gc.log -javaagent:skywalking-agent.jar -Dskywalking.agent.service_name=$SERVICE_NAME -Dskywalking.collector.backend_service=$SW_SERVER"
if [[ -z "$1" ]]; then
    echo -e "\033[0;31m 未输入操作名 \033[0m \033[0;34m {start|stop|restart|status} \033[0m"
    exit 1
fi
if [[ -z "$SERVICE" ]]; then
    echo -e "\033[0;31m 未输入应用名 \033[0m"
    exit 1
fi
LOGS_DIR="$DEPLOY_DIR/logs/$SERVICE_NAME"
echo "$LOGS_DIR"
if [[ ! -d "$LOGS_DIR" ]]; then
    mkdir -p "${LOGS_DIR}"
fi
LOG_PATH="$LOGS_DIR/stdout.out"
pid=0
# Look up the PID(s) of the running jar; leaves $pid empty when not running.
# NOTE(review): this greps for the jar path as a substring of the command line,
# so an unrelated process mentioning the same path would also match.
checkPid() {
    pid=$(ps -ef | grep ${FILE_PATH} | grep -v grep | awk '{print $2}')
}
# Start the service in the background unless it is already running.
start() {
    checkPid
    if [[ -z "$pid" ]]; then
        BUILD_ID=dontKillMe nohup java ${JVM_OPTS} -jar ${FILE_PATH} >>${LOG_PATH} 2>&1 &
        echo "$SERVICE_NAME is starting you can check the $LOG_PATH"
    else
        echo "$SERVICE_NAME is running PID: $pid"
    fi
}
# Force-stop the service (SIGKILL: no graceful shutdown hooks run).
stop() {
    checkPid
    if [[ -z "$pid" ]]; then
        echo "$SERVICE_NAME not running"
    else
        echo "$SERVICE_NAME stop..."
        # $pid may contain several PIDs, one per line; keep it unquoted so
        # word splitting passes each PID as a separate argument to kill.
        kill -9 ${pid}
    fi
}
# Stop then start, with a short pause to let the old process exit.
restart() {
    stop
    sleep 2
    start
}
# Report whether the service is currently running.
status() {
    checkPid
    if [[ -z "$pid" ]]; then
        echo "$SERVICE_NAME not running"
    else
        echo "$SERVICE_NAME running PID: $pid"
    fi
}
case $1 in
start) start ;;
stop) stop ;;
restart) restart ;;
status) status ;;
*) echo "require start|stop|restart|status" ;;
esac

View File

@ -5,7 +5,7 @@ FILE_PATH=$(readlink -f ${INPUT})
SERVICE=${INPUT##*/}
SERVICE_NAME=${SERVICE%.*}
DEPLOY_DIR=$(pwd)
JVM_OPTS="-server -Xms128m -Xmx768m"
JVM_OPTS="-server -Xms128m -Xmx768m -XX:+UseG1GC -XX:SurvivorRatio=6 -XX:MaxGCPauseMillis=400 -XX:G1ReservePercent=15 -XX:ParallelGCThreads=4 -XX:ConcGCThreads=1 -XX:InitiatingHeapOccupancyPercent=40 -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -Xloggc:../logs/gc.log"
if [[ "$1" == "" ]]; then
echo -e "\033[0;31m 未输入操作名 \033[0m \033[0;34m {start|stop|restart|status} \033[0m"

View File

@ -5,4 +5,5 @@ ALTER TABLE `base_app` ADD COLUMN `encrypt_type` VARCHAR(10) NOT NULL DEFAULT 'A
ALTER TABLE `base_app` ADD COLUMN `public_key` VARCHAR(2048) NOT NULL DEFAULT '' COMMENT 'RSA加解密公钥' AFTER `encrypt_type`;
# gateway.sql
ALTER TABLE `gateway_access_logs` MODIFY error VARCHAR(2000) DEFAULT NULL COMMENT '错误信息';
ALTER TABLE `gateway_access_logs` MODIFY `error` VARCHAR(2000) DEFAULT NULL COMMENT '错误信息';
ALTER TABLE `gateway_access_logs` MODIFY `authentication` MEDIUMTEXT DEFAULT NULL COMMENT '认证信息';

View File

@ -112,8 +112,6 @@ opencloud:
enabled: true
description: API开放网关
title: API开放网关
logstash:
destination: ${logstash.destination}
# 开放api
api:
# 参数签名验证

View File

@ -2,8 +2,6 @@
<configuration debug="false" scan="false">
<springProperty scope="context" name="spring.application.name" source="spring.application.name"
defaultValue="logs"/>
<springProperty scope="context" name="logstash.destination" source="opencloud.logstash.destination"
defaultValue="127.0.0.1:5000"/>
<property name="log.path" value="logs/${spring.application.name}"/>
<!-- Console log output -->
@ -50,8 +48,13 @@
</appender>
<!-- Appender to log to file in a JSON format -->
<appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>${logstash.destination}</destination>
<appender name="logstash" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/logstash.json</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/%d{yyyy-MM}/logstash.%d{yyyy-MM-dd}.%i.json.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp>

View File

@ -2,8 +2,6 @@
<configuration debug="false" scan="false">
<springProperty scope="context" name="spring.application.name" source="spring.application.name"
defaultValue="logs"/>
<springProperty scope="context" name="logstash.destination" source="opencloud.logstash.destination"
defaultValue="127.0.0.1:5000"/>
<property name="log.path" value="logs/${spring.application.name}"/>
<!-- Console log output -->
@ -50,8 +48,13 @@
</appender>
<!-- Appender to log to file in a JSON format -->
<appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>${logstash.destination}</destination>
<appender name="logstash" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/logstash.json</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/%d{yyyy-MM}/logstash.%d{yyyy-MM-dd}.%i.json.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp>

View File

@ -63,7 +63,7 @@ public class ResourceServerConfiguration extends ResourceServerConfigurerAdapter
.authorizeRequests()
// 监控端点内部放行
.requestMatchers(EndpointRequest.toAnyEndpoint()).permitAll()
// fegin访问或无需身份认证
// feign访问或无需身份认证
.antMatchers(
"/authority/access",
"/authority/app",

View File

@ -42,8 +42,6 @@ opencloud:
enabled: true
description: 平台基础服务器
title: 平台基础服务器
logstash:
destination: ${logstash.destination}
#mybatis plus 设置
mybatis-plus:

View File

@ -2,8 +2,6 @@
<configuration debug="false" scan="false">
<springProperty scope="context" name="spring.application.name" source="spring.application.name"
defaultValue="logs"/>
<springProperty scope="context" name="logstash.destination" source="opencloud.logstash.destination"
defaultValue="127.0.0.1:5000"/>
<property name="log.path" value="logs/${spring.application.name}"/>
<!-- Console log output -->
@ -50,8 +48,13 @@
</appender>
<!-- Appender to log to file in a JSON format -->
<appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>${logstash.destination}</destination>
<appender name="logstash" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/logstash.json</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/%d{yyyy-MM}/logstash.%d{yyyy-MM-dd}.%i.json.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp>

View File

@ -0,0 +1,19 @@
package com.opencloud.generator.server;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;

/**
 * Entry point of the code generator service.
 * Boots the Spring context and registers the instance with service discovery.
 *
 * @author liuyadu
 */
@EnableDiscoveryClient
@SpringBootApplication
public class GeneratorApplication {

    public static void main(String[] args) {
        // Equivalent to SpringApplication.run(GeneratorApplication.class, args)
        new SpringApplication(GeneratorApplication.class).run(args);
    }
}

View File

@ -0,0 +1,56 @@
package com.opencloud.generator.server.configuration;
import com.opencloud.common.exception.OpenAccessDeniedHandler;
import com.opencloud.common.exception.OpenAuthenticationEntryPoint;
import com.opencloud.common.security.OpenHelper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.autoconfigure.security.servlet.EndpointRequest;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.http.SessionCreationPolicy;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableResourceServer;
import org.springframework.security.oauth2.config.annotation.web.configuration.ResourceServerConfigurerAdapter;
import org.springframework.security.oauth2.config.annotation.web.configurers.ResourceServerSecurityConfigurer;
/**
 * OAuth2 resource server configuration for the generator service.
 *
 * @author: liuyadu
 * @date: 2018/10/23 10:31
 * @description:
 */
@Configuration
@EnableResourceServer
public class ResourceServerConfiguration extends ResourceServerConfigurerAdapter {
    @Autowired
    private RedisConnectionFactory redisConnectionFactory;

    @Override
    public void configure(ResourceServerSecurityConfigurer resources) {
        // Build the token service that resolves access tokens from Redis.
        resources.tokenServices(OpenHelper.buildRedisTokenServices(redisConnectionFactory));
    }

    @Override
    public void configure(HttpSecurity http) throws Exception {
        http.sessionManagement().sessionCreationPolicy(SessionCreationPolicy.IF_REQUIRED)
                .and()
                .authorizeRequests()
                // Actuator endpoints are open for internal monitoring.
                .requestMatchers(EndpointRequest.toAnyEndpoint()).permitAll()
                // Open for feign calls / paths that need no authentication.
                // NOTE(review): this leaves every /generate/** endpoint,
                // including code generation and file download, unauthenticated.
                .antMatchers(
                        "/generate/**"
                ).permitAll()
                .anyRequest().authenticated()
                .and()
                // Unified auth error handling; every resource server should
                // register these handlers so error responses stay consistent.
                .exceptionHandling()
                .accessDeniedHandler(new OpenAccessDeniedHandler())
                .authenticationEntryPoint(new OpenAuthenticationEntryPoint())
                .and()
                .csrf().disable();
    }
}

View File

@ -0,0 +1,53 @@
package com.opencloud.generator.server.controller;
import com.opencloud.common.utils.WebUtils;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import javax.servlet.http.HttpServletResponse;
import java.io.*;
/**
 * Download endpoint for archives produced by the code generator.
 *
 * @author: liuyadu
 * @date: 2019/7/19 15:26
 * @description:
 */
@Api(tags = "在线代码生成器")
@Controller
@RequestMapping("/generate")
public class DownLoadController {

    /**
     * Streams the file at {@code filePath} back to the client as an attachment.
     * <p>
     * SECURITY NOTE(review): {@code filePath} comes straight from the request
     * and is opened verbatim, so any file readable by the server process can be
     * downloaded (path traversal). This should be restricted to the generator's
     * output directory before exposing the endpoint publicly.
     *
     * @param filePath path of the file to download, as produced by the generator
     * @param response servlet response the file bytes are written to
     * @throws Exception if the file cannot be read or the response written
     */
    @ApiOperation(value = "文件下载", notes = "文件下载")
    @GetMapping(value = "/download")
    public void download(
            @RequestParam("filePath") String filePath,
            HttpServletResponse response
    ) throws Exception {
        File file = new File(filePath);
        download(response, filePath, file.getName());
    }

    /**
     * Copies the file content into the response output stream.
     *
     * @param response servlet response to write to
     * @param filePath path of the file being sent
     * @param fileName name used for the download header
     * @throws IOException on any read/write failure
     */
    private void download(HttpServletResponse response, String filePath, String fileName) throws IOException {
        WebUtils.setFileDownloadHeader(response, fileName);
        // try-with-resources closes both streams even when the copy fails;
        // the original leaked the input stream on any I/O error.
        try (BufferedInputStream inStream = new BufferedInputStream(new FileInputStream(filePath));
             BufferedOutputStream outStream = new BufferedOutputStream(response.getOutputStream())) {
            byte[] buffer = new byte[1024];
            int bytesRead;
            while ((bytesRead = inStream.read(buffer)) != -1) {
                outStream.write(buffer, 0, bytesRead);
            }
            outStream.flush();
        }
    }
}

View File

@ -0,0 +1,101 @@
package com.opencloud.generator.server.controller;
import com.baomidou.mybatisplus.annotation.DbType;
import com.baomidou.mybatisplus.generator.config.*;
import com.baomidou.mybatisplus.generator.config.builder.ConfigBuilder;
import com.baomidou.mybatisplus.generator.config.po.TableInfo;
import com.google.common.collect.Maps;
import com.opencloud.common.model.ResultBody;
import com.opencloud.common.utils.DateUtils;
import com.opencloud.generator.server.service.GenerateConfig;
import com.opencloud.generator.server.service.GeneratorService;
import com.opencloud.generator.server.utils.ZipUtil;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.io.File;
import java.util.List;
import java.util.Map;
/**
 * Online code generator endpoints: inspect a database's tables and generate
 * a zipped code archive from selected tables.
 *
 * @author: liuyadu
 * @date: 2019/7/19 15:26
 * @description:
 */
@Api(tags = "在线代码生成器")
@RestController
@RequestMapping("/generate")
public class GenerateController {

    /**
     * Lists all tables of the given database.
     * <p>
     * NOTE(review): raw JDBC credentials arrive as request parameters and are
     * used to open a connection — confirm this endpoint is only reachable in
     * trusted environments.
     *
     * @param type       database type keyword resolved via {@code DbType.getDbType}
     * @param driverName JDBC driver class name
     * @param url        JDBC connection URL
     * @param username   database account
     * @param password   database password
     * @return table metadata for every table visible to the connection
     */
    @ApiOperation(value = "获取所有表信息", notes = "获取所有表信息")
    @PostMapping("/tables")
    public ResultBody<List<TableInfo>> tables(
            @RequestParam(value = "type") String type,
            @RequestParam(value = "driverName") String driverName,
            @RequestParam(value = "url") String url,
            @RequestParam(value = "username") String username,
            @RequestParam(value = "password") String password
    ) {
        GlobalConfig gc = new GlobalConfig();
        // Data source configuration built from the request parameters.
        DataSourceConfig dsc = new DataSourceConfig();
        dsc.setDbType(DbType.getDbType(type));
        dsc.setDriverName(driverName);
        dsc.setUrl(url);
        dsc.setUsername(username);
        dsc.setPassword(password);
        StrategyConfig strategy = new StrategyConfig();
        TemplateConfig templateConfig = new TemplateConfig();
        ConfigBuilder config = new ConfigBuilder(new PackageConfig(), dsc, strategy, templateConfig, gc);
        List<TableInfo> list = config.getTableInfoList();
        return ResultBody.ok().data(list);
    }

    /**
     * Generates code for the selected tables into a timestamped temp directory,
     * zips the result, and returns the archive's path and file name (to be
     * fetched via the download endpoint).
     *
     * @param type          database type keyword
     * @param driverName    JDBC driver class name
     * @param url           JDBC connection URL
     * @param username      database account
     * @param password      database password
     * @param author        author name written into generated file headers
     * @param parentPackage parent package of the generated classes
     * @param moduleName    module name; also used as the zip file base name
     * @param includeTables comma-separated table names to generate
     * @param tablePrefix   comma-separated table prefixes to strip
     * @return map with "filePath" and "fileName" of the produced archive
     * @throws Exception if generation or zipping fails
     */
    @ApiOperation(value = "代码生成并下载", notes = "代码生成并下载")
    @PostMapping("/execute")
    public ResultBody<List<TableInfo>> execute(
            @RequestParam(value = "type") String type,
            @RequestParam(value = "driverName") String driverName,
            @RequestParam(value = "url") String url,
            @RequestParam(value = "username") String username,
            @RequestParam(value = "password") String password,
            @RequestParam(value = "author") String author,
            @RequestParam(value = "parentPackage") String parentPackage,
            @RequestParam(value = "moduleName") String moduleName,
            @RequestParam(value = "includeTables") String includeTables,
            @RequestParam(value = "tablePrefix") String tablePrefix
    ) throws Exception {
        // Unique output directory: <cwd>/temp/generator/<timestamp>
        String outputDir = System.getProperty("user.dir") + File.separator + "temp" + File.separator + "generator" + File.separator + DateUtils.getCurrentTimestampStr();
        GenerateConfig config = new GenerateConfig();
        config.setDbType(DbType.getDbType(type));
        config.setJdbcUrl(url);
        config.setJdbcUserName(username);
        config.setJdbcPassword(password);
        config.setJdbcDriver(driverName);
        config.setAuthor(author);
        config.setParentPackage(parentPackage);
        config.setModuleName(moduleName);
        config.setIncludeTables(includeTables.split(","));
        config.setTablePrefix(tablePrefix.split(","));
        config.setOutputDir(outputDir);
        GeneratorService.execute(config);
        String fileName = moduleName + ".zip";
        String filePath = outputDir + File.separator + fileName;
        // Zip the generated sources: the top-level package directory
        // (first segment of parentPackage) plus the "src" directory.
        String[] srcDir = {outputDir + File.separator + (parentPackage.substring(0, parentPackage.indexOf("."))), outputDir + File.separator + "src"};
        ZipUtil.toZip(srcDir, filePath, true);
        Map data = Maps.newHashMap();
        data.put("filePath", filePath);
        data.put("fileName", fileName);
        return ResultBody.ok().data(data);
    }
}

View File

@ -0,0 +1,68 @@
package com.opencloud.generator.server.service;
import com.baomidou.mybatisplus.annotation.DbType;
import lombok.Data;
import java.io.Serializable;
/**
 * Parameter holder for one code-generation run.
 *
 * @author liuyadu
 */
@Data
public class GenerateConfig implements Serializable {
    /**
     * Module name of the generated code.
     */
    private String moduleName;
    /**
     * Parent package of the generated classes.
     */
    private String parentPackage;
    /**
     * Table-name prefixes to strip when deriving class names.
     */
    private String[] tablePrefix;
    /**
     * Tables to generate code for; when empty all tables are generated.
     */
    private String[] includeTables;
    /**
     * Author name written into generated file headers.
     */
    private String author;
    /**
     * Database type.
     */
    private DbType dbType;
    /**
     * JDBC driver class name.
     */
    private String jdbcDriver;
    /**
     * JDBC connection URL.
     */
    private String jdbcUrl;
    /**
     * Database account.
     */
    private String jdbcUserName;
    /**
     * Database password.
     */
    private String jdbcPassword;
    /**
     * Output directory for the generated code.
     */
    private String outputDir;
}

View File

@ -0,0 +1,152 @@
package com.opencloud.generator.server.service;
import com.baomidou.mybatisplus.annotation.FieldFill;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.generator.AutoGenerator;
import com.baomidou.mybatisplus.generator.InjectionConfig;
import com.baomidou.mybatisplus.generator.config.*;
import com.baomidou.mybatisplus.generator.config.builder.ConfigBuilder;
import com.baomidou.mybatisplus.generator.config.converts.MySqlTypeConvert;
import com.baomidou.mybatisplus.generator.config.po.TableFill;
import com.baomidou.mybatisplus.generator.config.po.TableInfo;
import com.baomidou.mybatisplus.generator.config.rules.DbColumnType;
import com.baomidou.mybatisplus.generator.config.rules.NamingStrategy;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
/**
 * Code generator: configures and runs the mybatis-plus {@code AutoGenerator}
 * from the settings in a {@link GenerateConfig}.
 */
public class GeneratorService {
    /**
     * Runs the generator, emitting entity/mapper/service/controller sources
     * plus custom api.js and index.vue files under the configured output dir.
     *
     * @param generateConfig database connection, package and table settings
     */
    public static void execute(GenerateConfig generateConfig) {
        AutoGenerator mpg = new AutoGenerator();
        // Global configuration
        GlobalConfig gc = new GlobalConfig();
        gc.setOutputDir(generateConfig.getOutputDir());
        gc.setFileOverride(true);
        // ActiveRecord feature disabled
        gc.setActiveRecord(false);
        // XML ResultMap
        gc.setBaseResultMap(true);
        // XML columnList
        gc.setBaseColumnList(true);
        gc.setEnableCache(false);
        // Do not auto-open the output directory after generation
        gc.setOpen(false);
        gc.setAuthor(generateConfig.getAuthor());
        gc.setSwagger2(true);
        // Primary key strategy
        gc.setIdType(IdType.ASSIGN_ID);
        // Custom file naming; %s is filled with the entity name
        gc.setServiceName("%sService");
        gc.setServiceImplName("%sServiceImpl");
        mpg.setGlobalConfig(gc);
        // Data source configuration
        DataSourceConfig dsc = new DataSourceConfig();
        dsc.setDbType(generateConfig.getDbType());
        dsc.setDriverName(generateConfig.getJdbcDriver());
        dsc.setUrl(generateConfig.getJdbcUrl());
        dsc.setUsername(generateConfig.getJdbcUserName());
        dsc.setPassword(generateConfig.getJdbcPassword());
        dsc.setTypeConvert(new MySqlTypeConvert() {
            // Custom column type conversion (optional)
            @Override
            public DbColumnType processTypeConvert(GlobalConfig globalConfig, String fieldType) {
                // Map database datetime columns to java.util.Date
                if (fieldType.toLowerCase().contains("datetime")) {
                    return DbColumnType.DATE;
                }
                return (DbColumnType) super.processTypeConvert(globalConfig, fieldType);
            }
        });
        mpg.setDataSource(dsc);
        // Strategy configuration
        StrategyConfig strategy = new StrategyConfig();
        strategy.setCapitalMode(false);
        strategy.setRestControllerStyle(true);
        strategy.setEntityLombokModel(true);
        strategy.setEntityTableFieldAnnotationEnable(false);
        // Strip the configured table-name prefixes
        strategy.setTablePrefix(generateConfig.getTablePrefix());
        // Table naming strategy
        strategy.setNaming(NamingStrategy.underline_to_camel);
        // Column naming strategy
        strategy.setColumnNaming(NamingStrategy.underline_to_camel);
        strategy.setSuperEntityColumns("create_time", "update_time");
        // Mapper superclass
        strategy.setSuperMapperClass("com.opencloud.common.mybatis.base.mapper.SuperMapper");
        // Entity superclass
        strategy.setSuperEntityClass("com.opencloud.common.mybatis.base.entity.AbstractEntity");
        // Service interface superclass
        strategy.setSuperServiceClass("com.opencloud.common.mybatis.base.service.IBaseService");
        // Service implementation superclass
        strategy.setSuperServiceImplClass("com.opencloud.common.mybatis.base.service.impl.BaseServiceImpl");
        // Tables to generate
        strategy.setInclude(generateConfig.getIncludeTables());
        ConfigBuilder config = new ConfigBuilder(new PackageConfig(), dsc, strategy, new TemplateConfig(), gc);
        // NOTE(review): this table-info list is computed but never used below.
        List<TableInfo> list = config.getTableInfoList();
        // Common field fill (audit columns)
        ArrayList<TableFill> tableFills = new ArrayList<>();
        tableFills.add(new TableFill("createTime", FieldFill.INSERT));
        tableFills.add(new TableFill("updateTime", FieldFill.UPDATE));
        strategy.setTableFillList(tableFills);
        mpg.setStrategy(strategy);
        // Package configuration
        PackageConfig pc = new PackageConfig();
        // Parent package
        pc.setParent(generateConfig.getParentPackage());
        // Module name under the parent package
        pc.setModuleName(generateConfig.getModuleName());
        // Entity sub-package
        pc.setEntity("client.model.entity");
        // Controller sub-package
        pc.setController("server.controller");
        // Mapper sub-package
        pc.setMapper("server.mapper");
        // Mapper XML location
        pc.setXml("resources.mapper");
        pc.setServiceImpl("server.service.impl");
        pc.setService("server.service");
        // Custom injection configuration
        InjectionConfig cfg = new InjectionConfig() {
            @Override
            public void initMap() {
                // to do nothing
            }
        };
        String jsPath = "/templates/api.js.vm";
        String vuePath = "/templates/index.vue.vm";
        List<FileOutConfig> focList = new ArrayList<>();
        // Custom outputs are generated with priority
        focList.add(new FileOutConfig(jsPath) {
            @Override
            public String outputFile(TableInfo tableInfo) {
                // Custom output file name; entity pre/suffixes also affect
                // the matching XML file name.
                String path = gc.getOutputDir() + File.separator + "src/api" + File.separator + tableInfo.getEntityName() + ".js";
                return path;
            }
        });
        focList.add(new FileOutConfig(vuePath) {
            @Override
            public String outputFile(TableInfo tableInfo) {
                String path = gc.getOutputDir() + File.separator + "src/view/module" + File.separator + pc.getModuleName() + File.separator + tableInfo.getEntityName() + File.separator + "index.vue";
                return path;
            }
        });
        cfg.setFileOutConfigList(focList);
        mpg.setCfg(cfg);
        mpg.setPackageInfo(pc);
        // Run the generation
        mpg.execute();
    }
}

View File

@ -0,0 +1,129 @@
package com.opencloud.generator.server.utils;
import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

/**
 * Helpers for packing files and directories into a single zip archive.
 */
public class ZipUtil {

    // Copy buffer size used when streaming file bytes into the archive.
    private static final int BUFFER_SIZE = 2 * 1024;

    /**
     * Compresses the given files/directories into one zip archive.
     *
     * @param srcDir           paths of the files or directories to compress
     * @param outDir           path of the zip file to create
     * @param KeepDirStructure whether to keep the original directory layout;
     *                         when false every file lands in the archive root
     *                         (duplicate names then fail the compression)
     * @throws RuntimeException wrapping any failure during compression
     */
    public static void toZip(String[] srcDir, String outDir, boolean KeepDirStructure) throws Exception {
        long start = System.currentTimeMillis();
        // try-with-resources guarantees both streams are closed even on
        // failure; the original leaked the FileOutputStream when the
        // ZipOutputStream could not be created and swallowed close() errors.
        try (OutputStream out = new FileOutputStream(new File(outDir));
             ZipOutputStream zos = new ZipOutputStream(out)) {
            List<File> sourceFileList = new ArrayList<File>();
            for (String dir : srcDir) {
                sourceFileList.add(new File(dir));
            }
            compress(sourceFileList, zos, KeepDirStructure);
            long end = System.currentTimeMillis();
            System.out.println("压缩完成,耗时:" + (end - start) + " ms");
        } catch (Exception e) {
            throw new RuntimeException("zip error from ZipUtils", e);
        }
    }

    /**
     * Recursively adds one file or directory to the archive.
     *
     * @param sourceFile       file or directory to add
     * @param zos              zip stream being written
     * @param name             entry name inside the archive
     * @param KeepDirStructure whether to keep the directory layout
     * @throws Exception on any I/O failure
     */
    private static void compress(File sourceFile, ZipOutputStream zos, String name, boolean KeepDirStructure) throws Exception {
        if (sourceFile.isFile()) {
            writeFileEntry(sourceFile, zos, name);
        } else {
            File[] listFiles = sourceFile.listFiles();
            if (listFiles == null || listFiles.length == 0) {
                // Preserve empty directories only when the layout is kept.
                if (KeepDirStructure) {
                    zos.putNextEntry(new ZipEntry(name + "/"));
                    zos.closeEntry();
                }
            } else {
                for (File file : listFiles) {
                    if (KeepDirStructure) {
                        compress(file, zos, name + "/" + file.getName(), KeepDirStructure);
                    } else {
                        // Flattened layout: same-named files in different
                        // directories will raise a duplicate-entry error.
                        compress(file, zos, file.getName(), KeepDirStructure);
                    }
                }
            }
        }
    }

    /**
     * Adds each root file/directory to the archive under its own name.
     * (The original duplicated the recursion logic here; delegating to the
     * single-file overload is behaviorally identical.)
     */
    private static void compress(List<File> sourceFileList, ZipOutputStream zos, boolean KeepDirStructure) throws Exception {
        for (File sourceFile : sourceFileList) {
            compress(sourceFile, zos, sourceFile.getName(), KeepDirStructure);
        }
    }

    /**
     * Writes a single file's bytes as one zip entry.
     */
    private static void writeFileEntry(File sourceFile, ZipOutputStream zos, String name) throws IOException {
        byte[] buf = new byte[BUFFER_SIZE];
        zos.putNextEntry(new ZipEntry(name));
        // try-with-resources closes the input even if the copy fails;
        // the original leaked it on I/O errors.
        try (FileInputStream in = new FileInputStream(sourceFile)) {
            int len;
            while ((len = in.read(buf)) != -1) {
                zos.write(buf, 0, len);
            }
        }
        zos.closeEntry();
    }

    public static void main(String[] args) throws Exception {
        String[] srcDir = {"path\\Desktop\\java", "path\\Desktop\\java2", "path\\Desktop\\fortest.txt"};
        String outDir = "path\\Desktop\\aaa.zip";
        ZipUtil.toZip(srcDir, outDir, true);
    }
}

View File

@ -41,6 +41,4 @@ opencloud:
swagger2:
enabled: true
description: 代码生成服务器
title: 代码生成服务器
logstash:
destination: ${logstash.destination}
title: 代码生成服务器

View File

@ -2,8 +2,6 @@
<configuration debug="false" scan="false">
<springProperty scope="context" name="spring.application.name" source="spring.application.name"
defaultValue="logs"/>
<springProperty scope="context" name="logstash.destination" source="opencloud.logstash.destination"
defaultValue="127.0.0.1:5000"/>
<property name="log.path" value="logs/${spring.application.name}"/>
<!-- Console log output -->
@ -50,8 +48,13 @@
</appender>
<!-- Appender to log to file in a JSON format -->
<appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>${logstash.destination}</destination>
<appender name="logstash" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/logstash.json</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/%d{yyyy-MM}/logstash.%d{yyyy-MM-dd}.%i.json.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp>

View File

@ -52,8 +52,6 @@ opencloud:
enabled: true
description: 平台用户认证服务器
title: 平台用户认证服务器
logstash:
destination: ${logstash.destination}
#client:
# oauth2:
# admin:

View File

@ -2,8 +2,6 @@
<configuration debug="false" scan="false">
<springProperty scope="context" name="spring.application.name" source="spring.application.name"
defaultValue="logs"/>
<springProperty scope="context" name="logstash.destination" source="opencloud.logstash.destination"
defaultValue="127.0.0.1:5000"/>
<property name="log.path" value="logs/${spring.application.name}"/>
<!-- Console log output -->
@ -50,8 +48,13 @@
</appender>
<!-- Appender to log to file in a JSON format -->
<appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>${logstash.destination}</destination>
<appender name="logstash" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/logstash.json</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/%d{yyyy-MM}/logstash.%d{yyyy-MM-dd}.%i.json.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp>

View File

@ -52,8 +52,6 @@ opencloud:
enabled: true
description: 门户开发者认证服务器
title: 门户开发者认证服务器
logstash:
destination: ${logstash.destination}
client:
oauth2:
#portal:

View File

@ -2,8 +2,6 @@
<configuration debug="false" scan="false">
<springProperty scope="context" name="spring.application.name" source="spring.application.name"
defaultValue="logs"/>
<springProperty scope="context" name="logstash.destination" source="opencloud.logstash.destination"
defaultValue="127.0.0.1:5000"/>
<property name="log.path" value="logs/${spring.application.name}"/>
<!-- Console log output -->
@ -50,8 +48,13 @@
</appender>
<!-- Appender to log to file in a JSON format -->
<appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>${logstash.destination}</destination>
<appender name="logstash" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/logstash.json</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/%d{yyyy-MM}/logstash.%d{yyyy-MM-dd}.%i.json.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp>

24
pom.xml
View File

@ -29,6 +29,7 @@
<spring-cloud.version>Greenwich.SR5</spring-cloud.version>
<alibaba.cloud.version>2.1.1.RELEASE</alibaba.cloud.version>
<alibaba.fastjson.version>1.2.66</alibaba.fastjson.version>
<jaeger.version>3.1.1</jaeger.version>
<apache.commons-lang3.version>3.9</apache.commons-lang3.version>
<apache.commons-text.version>1.8</apache.commons-text.version>
@ -58,11 +59,11 @@
<!--maven的settings.xml文件中设置<server>标签-->
<repository>
<id>open-platform-releases</id>
<url>http://192.168.7.31:8081/repository/maven-releases/</url>
<url>http://192.168.7.245:8081/repository/maven-releases/</url>
</repository>
<snapshotRepository>
<id>open-platform-snapshots</id>
<url>http://192.168.7.31:8081/repository/maven-snapshots/</url>
<url>http://192.168.7.245:8081/repository/maven-snapshots/</url>
</snapshotRepository>
</distributionManagement>
@ -82,8 +83,6 @@
<discovery.server-addr>127.0.0.1:8848</discovery.server-addr>
<!--Nacos服务发现命名空间,用于支持多环境.这里必须使用ID不能使用名称,默认为空-->
<discovery.namespace></discovery.namespace>
<!--Logstash日志收集地址-->
<logstash.destination>127.0.0.1:5000</logstash.destination>
</properties>
</profile>
<!-- 开发 -->
@ -104,8 +103,6 @@
<discovery.server-addr>192.168.7.31:8848</discovery.server-addr>
<!--Nacos服务发现命名空间,用于支持多环境.这里必须使用ID不能使用名称,默认为空-->
<discovery.namespace></discovery.namespace>
<!--Logstash日志收集地址-->
<logstash.destination>192.168.7.31:5000</logstash.destination>
</properties>
</profile>
<!-- 测试 -->
@ -122,8 +119,6 @@
<discovery.server-addr>192.168.7.83:8848</discovery.server-addr>
<!--Nacos服务发现命名空间,用于支持多环境.这里必须使用ID不能使用名称,默认为空-->
<discovery.namespace></discovery.namespace>
<!--Logstash日志收集地址-->
<logstash.destination>192.168.7.83:5000</logstash.destination>
</properties>
</profile>
<!-- 预发布 -->
@ -140,8 +135,6 @@
<discovery.server-addr>172.18.55.14:8848</discovery.server-addr>
<!--Nacos服务发现命名空间,用于支持多环境.这里必须使用ID不能使用名称,默认为空-->
<discovery.namespace></discovery.namespace>
<!--Logstash日志收集地址-->
<logstash.destination>172.18.55.14:5000</logstash.destination>
</properties>
</profile>
<!-- 生产 -->
@ -158,8 +151,6 @@
<discovery.server-addr>172.18.108.117:8848</discovery.server-addr>
<!--Nacos服务发现命名空间,用于支持多环境.这里必须使用ID不能使用名称,默认为空-->
<discovery.namespace></discovery.namespace>
<!--Logstash日志收集地址-->
<logstash.destination>172.18.108.117:5000</logstash.destination>
</properties>
</profile>
</profiles>
@ -225,6 +216,15 @@
<skipTests>true</skipTests>
</configuration>
</plugin>
<!-- docker打包跳过 -->
<plugin>
<groupId>com.spotify</groupId>
<artifactId>dockerfile-maven-plugin</artifactId>
<version>${maven-dockerfile-plugin.version}</version>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -52,8 +52,6 @@ opencloud:
enabled: true
description: 工作流服务器
title: 工作流服务器
logstash:
destination: ${logstash.destination}
#mybatis plus 设置
mybatis-plus:

View File

@ -2,8 +2,6 @@
<configuration debug="false" scan="false">
<springProperty scope="context" name="spring.application.name" source="spring.application.name"
defaultValue="logs"/>
<springProperty scope="context" name="logstash.destination" source="opencloud.logstash.destination"
defaultValue="127.0.0.1:5000"/>
<property name="log.path" value="logs/${spring.application.name}"/>
<!-- Console log output -->
@ -50,8 +48,13 @@
</appender>
<!-- Appender to log to file in a JSON format -->
<appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>${logstash.destination}</destination>
<appender name="logstash" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/logstash.json</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/%d{yyyy-MM}/logstash.%d{yyyy-MM-dd}.%i.json.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp>

View File

@ -42,8 +42,6 @@ opencloud:
enabled: true
description: 评价服务
title: 评价服务
logstash:
destination: ${logstash.destination}
#mybatis plus 设置
mybatis-plus:

View File

@ -2,8 +2,6 @@
<configuration debug="false" scan="false">
<springProperty scope="context" name="spring.application.name" source="spring.application.name"
defaultValue="logs"/>
<springProperty scope="context" name="logstash.destination" source="opencloud.logstash.destination"
defaultValue="127.0.0.1:5000"/>
<property name="log.path" value="logs/${spring.application.name}"/>
<!-- Console log output -->
@ -50,8 +48,13 @@
</appender>
<!-- Appender to log to file in a JSON format -->
<appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>${logstash.destination}</destination>
<appender name="logstash" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/logstash.json</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/%d{yyyy-MM}/logstash.%d{yyyy-MM-dd}.%i.json.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp>

View File

@ -42,8 +42,6 @@ opencloud:
description: 钉钉服务
enabled: true
title: 钉钉服务
logstash:
destination: ${logstash.destination}
#mybatis plus 设置
mybatis-plus:

View File

@ -2,8 +2,6 @@
<configuration debug="false" scan="false">
<springProperty scope="context" name="spring.application.name" source="spring.application.name"
defaultValue="logs"/>
<springProperty scope="context" name="logstash.destination" source="opencloud.logstash.destination"
defaultValue="127.0.0.1:5000"/>
<property name="log.path" value="logs/${spring.application.name}"/>
<!-- Console log output -->
@ -50,8 +48,13 @@
</appender>
<!-- Appender to log to file in a JSON format -->
<appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>${logstash.destination}</destination>
<appender name="logstash" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/logstash.json</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/%d{yyyy-MM}/logstash.%d{yyyy-MM-dd}.%i.json.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp>

View File

@ -41,6 +41,4 @@ opencloud:
swagger2:
enabled: true
description: 文件服务
title: 文件服务
logstash:
destination: ${logstash.destination}
title: 文件服务

View File

@ -2,8 +2,6 @@
<configuration debug="false" scan="false">
<springProperty scope="context" name="spring.application.name" source="spring.application.name"
defaultValue="logs"/>
<springProperty scope="context" name="logstash.destination" source="opencloud.logstash.destination"
defaultValue="127.0.0.1:5000"/>
<property name="log.path" value="logs/${spring.application.name}"/>
<!-- Console log output -->
@ -50,8 +48,13 @@
</appender>
<!-- Appender to log to file in a JSON format -->
<appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<destination>${logstash.destination}</destination>
<appender name="logstash" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/logstash.json</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/%d{yyyy-MM}/logstash.%d{yyyy-MM-dd}.%i.json.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder charset="UTF-8" class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
<providers>
<timestamp>

View File

@ -51,8 +51,6 @@ opencloud:
enabled: true
description: 消息服务器
title: 消息服务器
logstash:
destination: ${logstash.destination}
#mybatis plus 设置
mybatis-plus:

View File

@ -2,8 +2,6 @@
<configuration debug="false" scan="false">
<springProperty scope="context" name="spring.application.name" source="spring.application.name"
defaultValue="logs"/>
<springProperty scope="context" name="logstash.destination" source="opencloud.logstash.destination"
defaultValue="127.0.0.1:5000"/>
<property name="log.path" value="logs/${spring.application.name}"/>
<!-- Console log output -->
@ -50,8 +48,13 @@