Notes on MySQL validation

mysql: [Warning] Using a password on the command line interface can be insecure.
It is better not to put the password in the command itself; for example, run mysql -u <user> -p and type the password at the prompt instead.

 

Check whether a table exists:
show tables like 'sys_menu';

Check whether a column exists in MySQL:

show columns from sys_online_log like 'id';
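
The same check can be done from code. A minimal sketch with Spring's JdbcTemplate (the jdbcTemplate is assumed to be already configured, as in the examples further down):

import java.util.List;
import java.util.Map;
import org.springframework.jdbc.core.JdbcTemplate;

public class MysqlColumnCheck {
    // Returns true if the MySQL table has a column with the given name.
    // "show columns ... like" returns one row per matching column, so an empty result means "not present".
    public static boolean columnExists(JdbcTemplate jdbcTemplate, String tableName, String columnName) {
        List<Map<String, Object>> rows =
                jdbcTemplate.queryForList("show columns from " + tableName + " like '" + columnName + "'");
        return !rows.isEmpty();
    }
}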

Checking whether a column exists in Hive and MySQL:

MySQL:
show columns from sys_online_log;

Hive:
desc sys_online_log;
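
For Hive the note above uses desc, so one option is to run desc and scan the col_name values it returns; a sketch along the same lines (again assuming a JdbcTemplate backed by a Hive connection):

import java.util.List;
import java.util.Map;
import org.springframework.jdbc.core.JdbcTemplate;

public class HiveColumnCheck {
    // Returns true if "desc <table>" lists a row whose col_name matches the requested column.
    public static boolean columnExists(JdbcTemplate jdbcTemplate, String tableName, String columnName) {
        List<Map<String, Object>> rows = jdbcTemplate.queryForList("desc " + tableName);
        for (Map<String, Object> row : rows) {
            if (columnName.equalsIgnoreCase(String.valueOf(row.get("col_name")))) {
                return true;
            }
        }
        return false;
    }
}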

 

First check whether the table exists. MySQL and Hive can share the same statement, show tables like <table name>:

List<Map<String, Object>> results = jdbcTemplate.queryForList("show tables like '" + tableName + "'");
if (results.size() == 0) {
    return Message.fail("The table does not exist!");
}
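
Because the table name is concatenated straight into the SQL string, it is worth validating it first. A small hedged sketch (the allowed pattern of letters, digits and underscores is an assumption; adjust it to your own naming rules), to be called before building the show tables like statement:

import java.util.regex.Pattern;

public class TableNameValidator {
    // Conservative whitelist: letters, digits and underscores only (an assumption, not a MySQL/Hive rule).
    private static final Pattern SAFE_NAME = Pattern.compile("^[A-Za-z0-9_]+$");

    public static boolean isSafeTableName(String tableName) {
        return tableName != null && SAFE_NAME.matcher(tableName).matches();
    }
}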

 

Fetch the information for all columns; below, only the column name and the comment are taken.

For MySQL, take the Field and Comment columns; the SQL to run is show full columns from <table name>.

For Hive, take the col_name and comment columns; the SQL is desc <table name>;

 

Build the query and result-column names according to the connection type:

	public Map<String, String> getParamByConnType(String connType, String tableName) {
		Map<String, String> params = new HashMap<>();
		if (connType.equalsIgnoreCase("mysql")) {
			// MySQL: "show full columns" returns the name in "Field" and the comment in "Comment"
			params.put("sql", "show full columns from " + tableName);
			params.put("column", "Field");
			params.put("comment", "Comment");
		} else {
			// Hive: "desc" returns the name in "col_name" and the comment in "comment"
			params.put("sql", "desc " + tableName);
			params.put("column", "col_name");
			params.put("comment", "comment");
		}
		return params;
	}

Fetch the column names and comments:

List<Map<String, Object>> maps = jdbcTemplate.queryForList(params.get("sql"));
for (Map<String, Object> map : maps) {
    service.insert(String.valueOf(map.get(params.get("column"))), String.valueOf(map.get(params.get("comment"))));
}

Show all columns:

show columns from sys_online_log;

 

Running a SQL script in MySQL:
source /data/sql/tsg.sql
source /data/sql/tsg.txt
Either works, as long as the file contains valid SQL.
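
If the script has to be executed from Java code rather than the mysql client, Spring's ScriptUtils can run a file of SQL statements over a borrowed connection. A minimal sketch (the DataSource and the file path are assumptions):

import java.sql.Connection;
import javax.sql.DataSource;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.datasource.init.ScriptUtils;

public class SqlScriptRunner {
    // Executes every statement in the given .sql file on a connection taken from the DataSource.
    public static void runScript(DataSource dataSource, String path) throws Exception {
        try (Connection connection = dataSource.getConnection()) {
            ScriptUtils.executeSqlScript(connection, new FileSystemResource(path));
        }
    }
}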

 

The code to verify that a database connection can be established is as follows:

    private void checkConnection(String username, String password, String connType, String ip
            , String port, String defaultDatabase, String principal) throws Exception {
        java.sql.Connection connection = null;
        try {
            String driverClassName = JdbcUtils.getDriverClassName(connType);
            String jdbcUrl = JdbcUtils.getJdbcUrl(connType, ip, port, defaultDatabase, principal);
            Class.forName(driverClassName);
            connection = DriverManager.getConnection(jdbcUrl, username, password);
        } catch (Exception e) {
            LOGGER.error(e.getMessage(), e);
            throw new ConnnectFailedException("Unable to connect to the database");
        } finally {
            if (null != connection) {
                connection.close();
            }
        }
    }
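
A call inside the same class could look like this; the host, port and credentials below are placeholders, and for MySQL the Kerberos principal is simply passed as null:

        // Sketch: verify a MySQL connection with the method above (all values are placeholders)
        try {
            checkConnection("root", "secret", "mysql", "127.0.0.1", "3306", "test", null);
            LOGGER.info("connection check passed");
        } catch (Exception e) {
            LOGGER.warn("connection check failed: " + e.getMessage());
        }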





import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class JdbcUtils {
    private static final Logger LOGGER = LoggerFactory.getLogger(JdbcUtils.class);
    public static String getJdbcUrl(String type, String ip, String port, String defaultDatabase, String principal) {
        LOGGER.info("getJdbcUrl type :" + type);
        String s = type.toUpperCase();
        switch (s) {
            case "HIVE":
                // The principal is appended as-is after a ';', so it should already be in "principal=..." form.
                String auth = "";
                if (StringUtils.isNotEmpty(principal)) {
                    auth = ";" + principal;
                }
                LOGGER.info("getJdbcUrl: ip-{},defaultDatabase-{},auth-{}", ip, defaultDatabase, auth);
                // The type string is spliced into the URL verbatim, so callers should pass it in lowercase ("hive" -> jdbc:hive2://).
                return "jdbc:" + type + "2://" + ip + ":" + port + "/" + (defaultDatabase == null ? ""
                        : defaultDatabase) + auth;
            case "MYSQL":
                return "jdbc:" + type + "://" + ip + ":" + port + "/" + (defaultDatabase == null ? ""
                        : defaultDatabase) + "?useUnicode=true&characterEncoding=utf-8&useSSL=false";
            default:
                return "jdbc:" + type + "://" + ip + ":" + port + "/" + (defaultDatabase == null ? ""
                        : defaultDatabase);
        }
    }
    public static String getDriverClassName(String type) {
        LOGGER.info("getDriverClassName type:" + type);
        String newType = type.toUpperCase();
        switch (newType) {
            case "HIVE":
                return "org.apache.hive.jdbc.HiveDriver";
            case "MYSQL":
                return "com.mysql.jdbc.Driver";
            default:
                return "";
        }
    }
}
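
For reference, with lowercase type strings the helper produces URLs like the following; the hosts, ports and principal are made-up values:

public class JdbcUtilsDemo {
    public static void main(String[] args) {
        System.out.println(JdbcUtils.getJdbcUrl("hive", "10.0.0.1", "10000", "default",
                "principal=hive/_HOST@EXAMPLE.COM"));
        // -> jdbc:hive2://10.0.0.1:10000/default;principal=hive/_HOST@EXAMPLE.COM

        System.out.println(JdbcUtils.getJdbcUrl("mysql", "10.0.0.2", "3306", "test", null));
        // -> jdbc:mysql://10.0.0.2:3306/test?useUnicode=true&characterEncoding=utf-8&useSSL=false
    }
}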

 

JdbcManager, and the DataSourceFactory it relies on:


import java.util.HashMap;
import java.util.Map;

import javax.sql.DataSource;

import com.portal.portalservice.connection.entity.Connection;
import org.springframework.jdbc.core.JdbcTemplate;

import com.appframe.entityframe.utils.DaoFactory;

public final class JdbcManager {
	// Cache of DataSources keyed by connection id (a plain HashMap, so not thread-safe; see the sketch below).
	private static Map<String, DataSource> dataSourceMap = new HashMap<String, DataSource>();
	private JdbcManager() {
	}
	public static JdbcTemplate getJdbc(String dataSourceId, String principal) {
		return new JdbcTemplate(getDataSource(dataSourceId, principal));
	}
	private static DataSource getDataSource(String connId, String principal) {
		DataSource dataSource = dataSourceMap.get(connId);
		if (dataSource == null) {
			// Load the saved connection definition and build (then cache) a pooled DataSource for it
			Connection conn = DaoFactory.getDao(Connection.class).get(connId);
			if (conn == null) {
				return null;
			} else {
				dataSource = DataSourceFactory.getDataSource(conn, principal);
				dataSourceMap.put(connId, dataSource);
			}
		}
		return dataSource;
	}
}
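
Because that cache is a plain HashMap shared by all callers, two threads asking for the same connId at the same time could build the DataSource twice. A thread-safe variant (only a sketch, reusing the same Connection entity, DaoFactory and DataSourceFactory as above) could use ConcurrentHashMap.computeIfAbsent:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import javax.sql.DataSource;

import com.portal.portalservice.connection.entity.Connection;
import org.springframework.jdbc.core.JdbcTemplate;

import com.appframe.entityframe.utils.DaoFactory;

public final class ThreadSafeJdbcManager {
	private static final Map<String, DataSource> dataSourceMap = new ConcurrentHashMap<>();
	private ThreadSafeJdbcManager() {
	}
	public static JdbcTemplate getJdbc(String dataSourceId, String principal) {
		DataSource dataSource = getDataSource(dataSourceId, principal);
		return dataSource == null ? null : new JdbcTemplate(dataSource);
	}
	private static DataSource getDataSource(String connId, String principal) {
		// computeIfAbsent builds the DataSource at most once per connId, even under concurrent access
		return dataSourceMap.computeIfAbsent(connId, id -> {
			Connection conn = DaoFactory.getDao(Connection.class).get(id);
			return conn == null ? null : DataSourceFactory.getDataSource(conn, principal);
		});
	}
}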

DataSourceFactory:

import javax.sql.DataSource;

import com.alibaba.druid.pool.DruidDataSource;
import com.portal.portalservice.connection.entity.Connection;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;

public final class DataSourceFactory {
	private DataSourceFactory() {
	}
	public static DataSource getDataSource(Connection conn, String principal) {
		DataSourceBuilder dataSourceBuilder = DataSourceBuilder.create().type(DruidDataSource.class);
		DruidDataSource druidDataSource = (DruidDataSource) dataSourceBuilder
				.driverClassName(JdbcUtils.getDriverClassName(conn.getConnTypeEnum()))
				.url(JdbcUtils.getJdbcUrl(conn.getConnTypeEnum(), conn.getHost(),
						String.valueOf(conn.getPort()), conn.getSchema(), principal))
				.username(conn.getLogin())
				.password(conn.getPassword())
				.build();
		// Configure the Druid connection pool
		druidDataSource.setDefaultAutoCommit(true);               // auto-commit updates
		druidDataSource.setTimeBetweenEvictionRunsMillis(600000); // interval (ms) between idle-connection eviction runs
		druidDataSource.setMaxActive(10);                         // maximum number of active connections in the pool
		druidDataSource.setMinIdle(3);                            // minimum number of idle connections to keep
		druidDataSource.setName(String.valueOf(conn.getId()));    // pool name: the connection id
		druidDataSource.setTestWhileIdle(false);                  // do not validate connections while idle
		return druidDataSource;
	}
}

Usage example:

JdbcTemplate jdbcTemplate = JdbcManager.getJdbc(connId, principal);
if (conn.getConnTypeEnum().equalsIgnoreCase("HIVE")) {
    // Stop Hive from prefixing result-set column names with the table name, so the keys match the plain column names
    jdbcTemplate.update("set hive.resultset.use.unique.column.names=false");
}
Map<String, String> params = service.getParamByConnType(conn.getConnTypeEnum(), tableName);
LOGGER.info("sql-" + params.get("sql"));
List<Map<String, Object>> maps = jdbcTemplate.queryForList(params.get("sql"));

 
