Storm 的 kafka-spout 和 jdbc-bolt

时间:2017-04-17 05:58:57

标签: mysql apache-kafka apache-storm

请帮我检查一下我的 KafkaSpout 和 JdbcBolt 代码!它们不能正常工作!

预览:

我的目的是处理来自 Kafka 的数据,然后把数据存储到 MySQL!所以,我使用 Storm 内置的 KafkaSpout 从 Kafka 获取数据,然后直接用 Storm 内置的 JdbcBolt 将数据存储到 MySQL!但它们都不工作!

以下代码是我的拓扑:

import java.util.HashMap;
import java.util.Map;
import org.apache.storm.Config;
import org.apache.storm.StormSubmitter;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.jdbc.bolt.JdbcInsertBolt;
import org.apache.storm.jdbc.common.ConnectionProvider;
import org.apache.storm.jdbc.common.HikariCPConnectionProvider;
import org.apache.storm.jdbc.mapper.JdbcMapper;
import org.apache.storm.kafka.KafkaSpout;
import org.apache.storm.kafka.SpoutConfig;
import org.apache.storm.kafka.StringScheme;
import org.apache.storm.kafka.ZkHosts;
import org.apache.storm.spout.SchemeAsMultiScheme;
import org.apache.storm.topology.TopologyBuilder;

public class TopologyKafka2Storm2Mysql {

    /**
     * Builds and submits a Kafka -&gt; Storm -&gt; MySQL topology: a KafkaSpout reads
     * string messages from the "logsinfo" topic and a JdbcInsertBolt persists
     * them into the "logsinfo" MySQL table.
     *
     * @param args unused command-line arguments
     * @throws AlreadyAliveException   if a topology with the same name is running
     * @throws InvalidTopologyException if the topology definition is invalid
     * @throws AuthorizationException  if the submitter lacks permission
     */
    public static void main(String[] args) throws AlreadyAliveException, InvalidTopologyException, AuthorizationException {

        // Zookeeper ensemble the spout uses to discover Kafka brokers and
        // persist consumer offsets.
        ZkHosts zkHosts = new ZkHosts("dn1:2181,dn2:2181,dn3:2181");

        String topic = "logsinfo";
        String consumerGroupId = "c1";
        SpoutConfig spoutConf = new SpoutConfig(zkHosts, topic, "/brokers/topics", consumerGroupId);
        // Decode each Kafka message as a single UTF-8 string field (named "str").
        spoutConf.scheme = new SchemeAsMultiScheme(new StringScheme());

        TopologyBuilder topologyBuilder = new TopologyBuilder();
        topologyBuilder.setSpout("kafka2Stormspout", new KafkaSpout(spoutConf), 5);

        // HikariCP connection-pool settings for the MySQL target database.
        Map<String, Object> hikariConfigMap = new HashMap<String, Object>();
        hikariConfigMap.put("dataSourceClassName", "com.mysql.jdbc.jdbc2.optional.MysqlDataSource");
        hikariConfigMap.put("dataSource.url", "jdbc:mysql://192.168.1.119/logs");
        hikariConfigMap.put("dataSource.user", "root");
        hikariConfigMap.put("dataSource.password", "******");
        ConnectionProvider connectionProvider = new HikariCPConnectionProvider(hikariConfigMap);

        String tableName = "logsinfo";
        // Maps each incoming tuple to the target table's columns.
        JdbcMapper myMapper = new MyJDBCMapper(tableName, connectionProvider);

        JdbcInsertBolt userPersistanceBolt = new JdbcInsertBolt(connectionProvider, myMapper)
                                            .withTableName("logsinfo")
                                            .withQueryTimeoutSecs(30);

        // BUG FIX: the bolt was declared but never subscribed to any stream, so
        // it received no tuples at all. shuffleGrouping wires it to the spout.
        topologyBuilder.setBolt("MySqlBolt", userPersistanceBolt, 1)
                       .shuffleGrouping("kafka2Stormspout");

        Config config = new Config();
        StormSubmitter.submitTopology("WYL Spout from Kakfa", config, topologyBuilder.createTopology());

    }
}


import java.util.ArrayList;
import java.util.List;
import org.apache.storm.jdbc.common.Column;
import org.apache.storm.jdbc.common.ConnectionProvider;
import org.apache.storm.jdbc.common.JdbcClient;
import org.apache.storm.jdbc.common.Util;
import org.apache.storm.jdbc.mapper.JdbcMapper;
import org.apache.storm.shade.org.apache.commons.lang.Validate;
import org.apache.storm.tuple.ITuple;

public class MyJDBCMapper implements JdbcMapper {
    private List<Column> schemaColumns;
    public MyJDBCMapper(String tableName, ConnectionProvider connectionProvider) {
        Validate.notEmpty(tableName);
        Validate.notNull(connectionProvider);
        int queryTimeoutSecs = 30;
        connectionProvider.prepare();
        JdbcClient client = new JdbcClient(connectionProvider, queryTimeoutSecs);
        this.schemaColumns = client.getColumnSchema(tableName);
    }

    public MyJDBCMapper(List<Column> schemaColumns) {
        Validate.notEmpty(schemaColumns);
        this.schemaColumns = schemaColumns;
    }

    public List<Column> getColumns(ITuple tuple) {
        List<Column> columns = new ArrayList<Column>();
        String[] split = tuple.toString().split(" ");
        for (int i = 0; i < schemaColumns.size(); i++) {
            String columnName = schemaColumns.get(i).getColumnName();
            Integer columnSqlType = schemaColumns.get(i).getSqlType();
            if (Util.getJavaType(columnSqlType).equals(String.class)) {
                String value=split[i];
                columns.add(new Column(columnName, value, columnSqlType));
            }
        }
        return columns;
    }

}

这就是我的代码!下图是拓扑运行时的截图!(原帖附有截图,此处无法显示)

我不知道我哪里错了?

0 个答案:

没有答案
相关问题