I need a sink that writes events to Kudu, so I implemented my own Kudu sink. Here is the code:
import java.util.Arrays;
import java.util.Map;
import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Transaction;
import org.apache.flume.conf.Configurable;
import org.apache.flume.sink.AbstractSink;
import org.kududb.client.Insert;
import org.kududb.client.KuduClient;
import org.kududb.client.KuduSession;
import org.kududb.client.KuduTable;
import org.kududb.client.PartialRow;
import org.kududb.client.SessionConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
*
* @author cespedjo
*/
public class CustomKuduSink extends AbstractSink implements Configurable {

    private static final String KUDU_TABLE_COLUMNS_CSV = "kudu.columns";
    private static final String KUDU_MASTER_SERVER = "kudu.masterserver";
    private static final String KUDU_DATABASE = "kudu.database";
    private static final String KUDU_COLUMNS_TYPE_CSV = "kudu.columnstype";
    private static final String KUDU_EVT_HEADER_KEYS = "kudu.headerKeys";
    private static final String KUDU_MAX_EVENTS = "kudu.maxEvents";

    private String tableName;
    private String[] columns;
    private String kuduMaster;
    private String[] columnsTypes;
    private String[] evtheaderKeys;
    private KuduClient kuduClient;
    private KuduTable table;
    private int maxEvents;

    private static final Logger LOG = LoggerFactory
            .getLogger(CustomKuduSink.class);

    public static final String SESION_ID = "sesionId";
    public static final String PANTALLA = "pantalla";
    public static final String TYPE = "type";
    public static final String TIMESTAMP_CLIENTE = "timestampCliente";
    public static final String ENTIDAD = "entidad";
    public static final String ROW_KEY = "rowKey";
    public static final String TIMESTAMP = "timestamp";
    public static final String TARGET = "target";
    public static final String ACCION = "accion";
    public static final String IP = "ip";
    public static final String USERAGENT = "userAgent";
    public static final String TIMESTAMP_SERVIDOR = "timestampServidor";
    public static final String CONTENIDO_PAGINAORIGEN = "contenido$paginaOrigen";
    public static final String CONTENIDO_DURACION = "contenido$duracion";
    public static final String CONTENIDO_USERNAME = "contenido$username";
    public static final String CONTENIDO_USERTYPE = "contenido$usertype";
    public static final String CONTENIDO_LABEL = "contenido$label";
    public static final String CONTENIDO_VALUE = "contenido$value";
    @Override
    public Status process() throws EventDeliveryException {
        Status status = Status.READY;
        boolean readyForClose = false;
        Channel ch = getChannel();
        Transaction txn = ch.getTransaction();
        txn.begin();
        try {
            LOG.info("Taking event from channel... ");
            Event evt = ch.take();
            if (evt == null) {
                LOG.info("Event is null... ");
                //status = Status.BACKOFF;
            } else {
                LOG.info(evt.getHeaders().toString());
                Insert insert = table.newInsert();
                PartialRow row = insert.getRow();
                Map<String, String> headers = evt.getHeaders();
                KuduSession session = kuduClient.newSession();
                session.setFlushMode(SessionConfiguration.FlushMode.AUTO_FLUSH_SYNC);
                session.setTimeoutMillis(10000);
                // Map each configured column to the matching event header,
                // writing a sentinel value when the header is missing or empty.
                for (int i = 0; i < this.columns.length; ++i) {
                    String aType = this.columnsTypes[i].trim();
                    String aColumn = this.columns[i].trim();
                    String value = headers.get(this.evtheaderKeys[i]);
                    LOG.info("column: " + aColumn + ", type: " + aType + ", value: " + value);
                    if (aType.equalsIgnoreCase("BIGINT"))
                        row.addLong(aColumn, value != null && !value.isEmpty()
                                ? Long.parseLong(value)
                                : -9999999999L);
                    if (aType.equalsIgnoreCase("STRING"))
                        row.addString(aColumn,
                                value == null || value.isEmpty() ? "NA" : value);
                    if (aType.equalsIgnoreCase("INT"))
                        row.addInt(aColumn, value != null && !value.isEmpty()
                                ? Integer.parseInt(value)
                                : -999999);
                    if (aType.equalsIgnoreCase("DECIMAL"))
                        row.addDouble(aColumn, value != null && !value.isEmpty()
                                ? Double.parseDouble(value)
                                : -99999.99);
                }
                // With AUTO_FLUSH_SYNC, apply() sends the insert immediately.
                session.apply(insert);
                LOG.info("Flushing... " + session.hasPendingOperations());
                session.close();
            }
            readyForClose = true;
            txn.commit();
        } catch (Throwable t) {
            LOG.error(t.getMessage());
            LOG.error(Arrays.toString(t.getStackTrace()));
            status = Status.BACKOFF;
            txn.rollback();
            readyForClose = true;
            if (t instanceof Error)
                throw (Error) t;
        } finally {
            if (readyForClose) {
                txn.close();
            } else {
                txn.rollback();
                txn.close();
            }
        }
        return status;
    }
    @Override
    public synchronized void start() {
        try {
            this.kuduClient = new KuduClient.KuduClientBuilder(this.kuduMaster)
                    .build();
            this.table = kuduClient.openTable(this.tableName);
        } catch (Exception ex) {
            // If openTable() fails here, `table` stays null and every later
            // process() call will fail, so log the full exception.
            LOG.error("Could not open Kudu table " + this.tableName, ex);
        } finally {
            super.start();
        }
    }

    @Override
    public synchronized void stop() {
        super.stop();
        try {
            // close() shuts the client down synchronously; a separate
            // shutdown() call afterwards is redundant.
            this.kuduClient.close();
        } catch (Exception ex) {
            LOG.error(ex.getMessage());
            LOG.error(Arrays.toString(ex.getStackTrace()));
        }
    }
    @Override
    public void configure(Context context) {
        String columnsCsv = context.getString(KUDU_TABLE_COLUMNS_CSV);
        this.columns = columnsCsv.split(",");
        this.tableName = context.getString(KUDU_DATABASE);
        this.kuduMaster = context.getString(KUDU_MASTER_SERVER);
        String columnsTypesCsv = context.getString(KUDU_COLUMNS_TYPE_CSV);
        this.columnsTypes = columnsTypesCsv.split(",");
        String headerKeys = context.getString(KUDU_EVT_HEADER_KEYS);
        this.evtheaderKeys = headerKeys.split(",");
        this.maxEvents = context.getInteger(KUDU_MAX_EVENTS, 100);
    }
}
I have three sources, three memory channels, and three sinks. The HDFS and AsyncHBase sinks receive events, but this custom sink never picks up any, even though the sources are injecting events into every channel. Please help.
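For reference, this is roughly how the custom sink is wired into the agent configuration. The component names, master host, table name, and column lists below are simplified placeholders; the real file also defines the HDFS and AsyncHBase pipelines in the same way:

# simplified excerpt of the agent properties file
agent.channels.ch3.type = memory

# type must be the fully qualified class name of the custom sink
agent.sinks.kuduSink.type = CustomKuduSink
agent.sinks.kuduSink.channel = ch3
agent.sinks.kuduSink.kudu.masterserver = kudu-master.example.com
agent.sinks.kuduSink.kudu.database = my_events_table
agent.sinks.kuduSink.kudu.columns = sesionId,pantalla,type,timestamp
agent.sinks.kuduSink.kudu.columnstype = STRING,STRING,STRING,BIGINT
agent.sinks.kuduSink.kudu.headerKeys = sesionId,pantalla,type,timestamp
agent.sinks.kuduSink.kudu.maxEvents = 100

Note that configure() parses kudu.columns, kudu.columnstype, and kudu.headerKeys positionally, so the three lists have to be the same length and in the same order.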