1. Problem analysis

Given a HiveSQL statement, verify its semantics, its syntax, and whether the queried tables and fields actually exist in the database.

  • 1.1 HiveSQL Semantic Verification

    For example, `select * from table1 where id = 1 and class = 'English'` is semantically complete, whereas `select * from table1 where id = 1 and class =` is truncated and fails semantic verification.
  • 1.2 HiveSQL Syntax Verification

    For example, suppose the fields `score` and `name` are required. The statement `select * from table` contains no semantic error, but it does not explicitly name `score` and `name`, so it violates the man-made syntax rule and is rejected.
  • 1.3 HiveSQL Verification

    `select score, name from table2 where sex = 2;` (1) satisfies the HiveSQL syntax rules and (2) satisfies the man-made rule requiring `score` and `name`, but (3) the table has no `sex` field in the database. Because check (3) fails, the overall verification fails.

2. Source code analysis

(1) Scheme 1: Use the SQLUtils method in Ali connection pool Druid

  • Able to complete syntactic and semantic verification. However, it cannot verify whether the tables and fields referenced by the query exist in the database
  • Requires very little code

(2) Solution 2: Communicates with Hive for SQL verification

  • SQL verification using the Hive server
  • The source code is as follows

Find the `HiveStatement` class in `hive-jdbc-1.1.0`:

 public boolean execute(String sql) throws SQLException {
        this.checkConnection("execute"); this.reInitState(); // Verify SQL TExecuteStatementReqexecReq = new TExecuteStatementReq(this.sessHandle, sql);
        execReq.setRunAsync(true);
        execReq.setConfOverlay(this.sessConf);
        execReq.setQueryTimeout((long)this.queryTimeout);

        try {
            TExecuteStatementResp execResp = this.client.ExecuteStatement(execReq);
            Utils.verifySuccessWithInfo(execResp.getStatus());
            this.stmtHandle = execResp.getOperationHandle();
            this.isExecuteStatementFailed = false;
        } catch (SQLException var9) {
            this.isExecuteStatementFailed = true;
            this.isLogBeingGenerated = false;
            throw var9;
        } catch (Exception var10) {
            this.isExecuteStatementFailed = true;
            this.isLogBeingGenerated = false;
            throw new SQLException(var10.toString(), "08S01", var10); } // execute SQL TGetOperationStatusReq statusReq = new TGetOperationStatusReq(this.stmthandle); boolean operationComplete =false;

        while(! operationComplete) { try { TGetOperationStatusResp statusResp = this.client.GetOperationStatus(statusReq); Utils.verifySuccessWithInfo(statusResp.getStatus());if (statusResp.isSetOperationState()) {
                    switch(statusResp.getOperationState()) {
                    case CLOSED_STATE:
                    case FINISHED_STATE:
                        operationComplete = true;
                        break;
                    case CANCELED_STATE:
                        throw new SQLException("Query was cancelled"."01000");
                    case TIMEDOUT_STATE:
                        throw new SQLTimeoutException("Query timed out after " + this.queryTimeout + " seconds");
                    case ERROR_STATE:
                        throw new SQLException(statusResp.getErrorMessage(), statusResp.getSqlState(), statusResp.getErrorCode());
                    case UKNOWN_STATE:
                        throw new SQLException("Unknown query"."HY000");
                    case INITIALIZED_STATE:
                    case PENDING_STATE:
                    case RUNNING_STATE:
                    }
                }
            } catch (SQLException var7) {
                this.isLogBeingGenerated = false;
                throw var7;
            } catch (Exception var8) {
                this.isLogBeingGenerated = false;
                throw new SQLException(var8.toString(), "08S01", var8);
            }
        }

        this.isLogBeingGenerated = false;
        if(! this.stmtHandle.isHasResultSet()) {return false;
        } else{ this.resultSet = (new Builder(this)).setClient(this.client).setSessionHandle(this.sessHandle).setStmtHandle(this.stmtHandle).setMaxRows(this.m axRows).setFetchSize(this.fetchSize).setScrollable(this.isScrollableResultset).build();return true; }}Copy the code
  • Explanation of the key lines:

    // sessHandle carries the attributes used to communicate with HiveServer2 (e.g. the sessionId);
    // sql is the incoming SQL statement (a String).
    TExecuteStatementReq execReq = new TExecuteStatementReq(this.sessHandle, sql);
    // Enable asynchronous execution.
    execReq.setRunAsync(true);
    execReq.setConfOverlay(this.sessConf);
    // Set the query timeout.
    execReq.setQueryTimeout((long) this.queryTimeout);

    try {
        // Submit the request through the Thrift client; the response object is execResp.
        TExecuteStatementResp execResp = this.client.ExecuteStatement(execReq);
        // Utils validates the returned status (which contains the server's message)
        // and throws an exception if it reports an error.
        Utils.verifySuccessWithInfo(execResp.getStatus());
        // Validation passed: keep the operation handle.
        this.stmtHandle = execResp.getOperationHandle();
        this.isExecuteStatementFailed = false;
    } catch (SQLException var9) {
        this.isExecuteStatementFailed = true;
        this.isLogBeingGenerated = false;
        throw var9;
    } catch (Exception var10) {
        this.isExecuteStatementFailed = true;
        this.isLogBeingGenerated = false;
        throw new SQLException(var10.toString(), "08S01", var10);
    }

3. Implementation (third method)

package com.xes.abtest.hive.common.service.impl;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledStatement;
import com.alibaba.druid.proxy.jdbc.StatementProxyImpl;
import com.xes.abtest.hive.common.api.ApiResult;
import com.xes.abtest.hive.common.service.HiveSqlVerifyService;
import org.apache.hive.jdbc.HiveStatement;
import org.apache.hive.service.cli.thrift.*;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.datasource.DataSourceUtils;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Map;

/**
 * Verifies a HiveSQL statement by submitting it directly to HiveServer2
 * over Thrift, without fetching any results.
 *
 * <p>The pooled connection is unwrapped via reflection down to the raw
 * {@link HiveStatement} so that its private Thrift session handle, client
 * and session configuration can be reused to issue a bare
 * {@code ExecuteStatement} request. HiveServer2 compiles the statement
 * (semantic, syntax and metadata checks) before execution, so an invalid
 * statement is reported in the response status.
 *
 * @author Satan
 */
@Component
public class HiveSqlVerifyServiceImpl implements HiveSqlVerifyService {

    // Druid pool configured against HiveServer2 (bean "hiveDruidDataSource").
    @Resource
    @Qualifier("hiveDruidDataSource")
    DruidDataSource druidDataSource;

    /**
     * Submits {@code sql} to HiveServer2 and reports whether it passed verification.
     *
     * @param sql the HiveSQL statement to verify
     * @return {@link ApiResult#ok()} when the server accepts the statement,
     *         otherwise {@link ApiResult#fail} carrying the server's error message
     * @throws SQLException if the reflective unwrapping or the Thrift call fails
     */
    @Override
    public ApiResult sqlVerify(String sql) throws SQLException {
        // Borrow a pooled connection; released in the finally block below.
        Connection con = DataSourceUtils.getConnection(druidDataSource);
        TExecuteStatementResp execResp;
        try {
            // Unwrap Druid's proxy layers to reach the underlying HiveStatement:
            // DruidPooledStatement -> StatementProxyImpl -> HiveStatement.
            Statement stat = con.createStatement();
            stat = ((DruidPooledStatement) stat).getStatement();
            Field prox = StatementProxyImpl.class.getDeclaredField("statement");
            prox.setAccessible(true);
            HiveStatement statement = (HiveStatement) prox.get(stat);

            // Extract HiveStatement's private Thrift handles by reflection.
            Field hiveSessHandle = HiveStatement.class.getDeclaredField("sessHandle");
            hiveSessHandle.setAccessible(true);
            TSessionHandle sessHandle = (TSessionHandle) hiveSessHandle.get(statement);

            Field hiveClient = HiveStatement.class.getDeclaredField("client");
            hiveClient.setAccessible(true);
            TCLIService.Iface client = (TCLIService.Iface) hiveClient.get(statement);

            Field sessConfField = HiveStatement.class.getDeclaredField("sessConf");
            sessConfField.setAccessible(true);
            @SuppressWarnings("unchecked") // sessConf is declared Map<String, String> in HiveStatement
            Map<String, String> sessConf = (Map<String, String>) sessConfField.get(statement);

            // Build and submit the bare ExecuteStatement request. Async with no
            // timeout: we only need the compile-time status, not the results.
            TExecuteStatementReq execReq = new TExecuteStatementReq(sessHandle, sql);
            execReq.setRunAsync(true);
            execReq.setQueryTimeout(0L);
            execReq.setConfOverlay(sessConf);

            execResp = client.ExecuteStatement(execReq);
        } catch (Exception e) {
            // Preserve the original cause instead of discarding it.
            throw new SQLException("SqlVerify abnormal", e);
        } finally {
            // Return the connection to the pool.
            DataSourceUtils.releaseConnection(con, druidDataSource);
        }

        boolean success = TStatusCode.SUCCESS_STATUS.equals(execResp.getStatus().getStatusCode());
        if (success) {
            return ApiResult.ok();
        }
        return ApiResult.fail(execResp.getStatus().getErrorMessage());
    }
}
  • Druid connection pool configuration for Hive:
package com.xes.abtest.hive.common.config;


import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.xes.abtest.hive.common.util.JdbcTemplateEdit;
import lombok.extern.slf4j.Slf4j;
import org.mybatis.spring.annotation.MapperScan;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;
import java.sql.SQLException;


/**
 * @author:Satan
 * @date :14:11
 * @description :
 */
@Slf4j
@Configuration
@EnableConfigurationProperties({DataSourceProperties.class,DataSourceCommonProperties.class})
@MapperScan(basePackages ="com.xes.abtest.hive.common.mapper"/*, sqlSessionFactoryRef = "slaveSqlSessionFactory"*/)
public class HiveDruidConfig {
    public static DruidPooledConnection druidPooledConnection = null;
    private  Logger logger = LoggerFactory.getLogger(HiveDruidConfig.class);


    @Autowired
    private DataSourceProperties dataSourceProperties;

    @Autowired
    private DataSourceCommonProperties dataSourceCommonProperties;






    @Bean("hiveDruidDataSource"// Create a new bean instance @qualifier ("hiveDruidDataSource"// public DruidDataSource/*DataSource*/dataSource(){ DruidDataSource datasource = new DruidDataSource(); / / configuration data source property datasource. SetUrl (dataSourceProperties. GetHive () get ("url"));
        datasource.setUsername(dataSourceProperties.getHive().get("username"));
        datasource.setPassword(dataSourceProperties.getHive().get("password"));
        datasource.setDriverClassName(dataSourceProperties.getHive().get("driver-class-name")); / / configuration unified property datasource. SetInitialSize (dataSourceCommonProperties. GetInitialSize ()); datasource.setMinIdle(dataSourceCommonProperties.getMinIdle());
        datasource.setMaxActive(dataSourceCommonProperties.getMaxActive());
        datasource.setMaxWait(dataSourceCommonProperties.getMaxWait());
        datasource.setTimeBetweenEvictionRunsMillis(dataSourceCommonProperties.getTimeBetweenEvictionRunsMillis());
        datasource.setMinEvictableIdleTimeMillis(dataSourceCommonProperties.getMinEvictableIdleTimeMillis());
        datasource.setValidationQuery(dataSourceCommonProperties.getValidationQuery());
        datasource.setTestWhileIdle(dataSourceCommonProperties.isTestWhileIdle());
        datasource.setTestOnBorrow(dataSourceCommonProperties.isTestOnBorrow());
        datasource.setTestOnReturn(dataSourceCommonProperties.isTestOnReturn());
        datasource.setPoolPreparedStatements(dataSourceCommonProperties.isPoolPreparedStatements());

        try {
            datasource.setFilters(dataSourceCommonProperties.getFilters());
        } catch (SQLException e) {
            logger.error("Druid configuration initialization filter error.", e);
        }
        returndatasource; } /** * This method uses ** @param dataSource to inject a bean named primaryDataSource * @ only when the JdbcTemplate object is neededreturnDatasource JdbcTemplate object */ // @bean (name ="slaveSqlSessionFactory")
//    public SqlSessionFactory sqlSessionFactory(@Qualifier("hiveDruidDataSource") DataSource dataSource) throws Exception {
//        MybatisSqlSessionFactoryBean sessionFactoryBean = new MybatisSqlSessionFactoryBean();
//        //sessionFactoryBean.setMapperLocations(new Resource[]{new ClassPathResource("mapper/*Mapper.xml")});
//        sessionFactoryBean.setDataSource(dataSource);
//
//        return sessionFactoryBean.getObject();
//    }

    @Bean(name = "primaryJdbcTemplate")
    public JdbcTemplate jdbcTemplate(@Qualifier("hiveDruidDataSource") DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }



    @Bean("hiveDruidDataSource1"// Create a new bean instance @qualifier ("hiveDruidDataSource1"// public DruidDataSource/*DataSource*/dataSource1(){
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(dataSourceProperties.getHive().get("url"));
        datasource.setUsername(dataSourceProperties.getHive().get("username"));
        datasource.setPassword(dataSourceProperties.getHive().get("password"));
        datasource.setDriverClassName(dataSourceProperties.getHive().get("driver-class-name"));
        return datasource;
    }


    @Bean("getJdbcTemplateEdit"// Create a new bean instance @qualifier ("getJdbcTemplateEdit"Public JdbcTemplateEdit/*DataSource*/ getJdbcTemplateEdit(@qualifier ("hiveDruidDataSource") DruidDataSource dataSource){


        return  new JdbcTemplateEdit(dataSource);
    }




    @Bean("getDruidPooledConnection"// Create a new bean instance @qualifier ("getDruidPooledConnection") mark / / / / @ Scope (ConfigurableBeanFactory. R) / / @ Scope (ConfigurableBeanFactory. SCOPE_PROTOTYPE) to the public DruidPooledConnection/*DataSource*/ getDruidPooledConnection(@Qualifier("hiveDruidDataSource") DruidDataSource dataSource){

        DruidPooledConnection connection = null;
        try {
            connection = dataSource.getConnection();
            druidPooledConnection = connection;
        } catch (SQLException e) {
            e.printStackTrace();
        }

        returnconnection; }}Copy the code