Cassandra trigger inadvertently clearing values

Asked: 2017-08-07 14:16:13

Tags: java cassandra apache-kafka

So my table has 4 columns, and inserts work fine. However, as soon as I attach a Java trigger to the table, the table no longer stores the value in the last column.
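
For reference, the table looks roughly like the sketch below (keyspace, table, and column names here are placeholders; the real schema just has to match the four columns the trigger reads):

    -- hypothetical schema, inferred from the trigger code below
    CREATE TABLE mykeyspace.transactions (
        id text PRIMARY KEY,   -- partition key, read back as a string
        subtype text,
        type text,
        value int              -- the column that ends up empty
    );

    INSERT INTO mykeyspace.transactions (id, subtype, type, value)
    VALUES ('tx1', 'transfer', 'debit', 42);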

The Java trigger itself just sets up a Kafka producer that I write the row data to. That shouldn't touch the table itself in any way, so I don't understand why the value gets cleared.

The table still executes my INSERT statements, but the last column, "value", is left empty as soon as the trigger is attached.
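
For completeness, the trigger jar is deployed to Cassandra's triggers directory and the trigger is attached with a plain CREATE TRIGGER statement, along these lines (again with the placeholder names from the sketch above):

    CREATE TRIGGER kafka_trigger ON mykeyspace.transactions
    USING 'JavaTrigger';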

My Cassandra trigger code looks like this:

    import java.nio.ByteBuffer;
    import java.util.Collection;
    import java.util.Collections;
    import java.util.Iterator;
    import java.util.Properties;

    import org.apache.avro.Schema;
    import org.apache.avro.generic.GenericData;
    import org.apache.avro.generic.GenericRecord;
    import org.apache.cassandra.config.ColumnDefinition;
    import org.apache.cassandra.db.Clustering;
    import org.apache.cassandra.db.Mutation;
    import org.apache.cassandra.db.partitions.Partition;
    import org.apache.cassandra.db.rows.Cell;
    import org.apache.cassandra.db.rows.Unfiltered;
    import org.apache.cassandra.db.rows.UnfilteredRowIterator;
    import org.apache.cassandra.triggers.ITrigger;
    import org.apache.kafka.clients.producer.KafkaProducer;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.clients.producer.ProducerRecord;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class JavaTrigger implements ITrigger {

        private static final Logger logger = LoggerFactory.getLogger(JavaTrigger.class);

        @Override
        public Collection<Mutation> augment(Partition partition)
        {
            String tableName = partition.metadata().cfName;
            logger.info("Table: " + tableName);

            String type = null;
            String subtype = null;
            String id = "0";
            int value = 0;

            // Read the partition key back as a plain string.
            ByteBuffer partitionKeyValues = partition.partitionKey().getKey();
            id = new String(partitionKeyValues.array());

            try {
                UnfilteredRowIterator it = partition.unfilteredIterator();
                while (it.hasNext()) {
                    Unfiltered un = it.next();
                    Clustering clt = (Clustering) un.clustering();
                    Iterator<Cell> cells = partition.getRow(clt).cells().iterator();
                    Iterator<ColumnDefinition> columns = partition.getRow(clt).columns().iterator();

                    // Cells are read positionally: subtype, type, then value.
                    int count = 0;
                    while (columns.hasNext()) {
                        Cell cell = cells.next();

                        if (count == 0) {
                            subtype = new String(cell.value().array());
                        }
                        if (count == 1) {
                            type = new String(cell.value().array());
                        }
                        if (count == 2) {
                            value = cell.value().getInt();
                        }
                        count++;
                    }
                }
            } catch (Exception e) {
                logger.error("Failed to read row cells", e);
            }

            createAvroProducer("topic1", id, type, subtype, value);

            // No additional mutations; the trigger only forwards data to Kafka.
            return Collections.emptyList();
        }

        public void createAvroProducer(String topicname, String id, String type, String subtype, int value)
        {
            Properties props = new Properties();
            props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.2.113:9092");
            props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                      io.confluent.kafka.serializers.KafkaAvroSerializer.class);
            props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                      io.confluent.kafka.serializers.KafkaAvroSerializer.class);
            props.put("schema.registry.url", "http://192.168.2.113:8081");
            KafkaProducer<Object, Object> producer = new KafkaProducer<>(props);

            String key = "key1";
            String userSchema =
                    "{\"type\":\"record\"," + "\"name\":\"myrecord\","
                    + "\"namespace\" : \"MyFlink.FlinkWrapper\","
                    + "\"fields\":[{\"name\":\"transactionid\",\"type\":\"string\"},"
                    + "{\"name\":\"transactionsubtype\",\"type\":\"string\"},"
                    + "{\"name\":\"transactiontype\",\"type\":\"string\"},"
                    + "{\"name\":\"transactionvalue\",\"type\":\"int\"}]}";
            Schema.Parser parser = new Schema.Parser();
            Schema schema = parser.parse(userSchema);
            GenericRecord avroRecord = new GenericData.Record(schema);
            avroRecord.put("transactionid", id);
            avroRecord.put("transactiontype", type);
            avroRecord.put("transactionsubtype", subtype);
            avroRecord.put("transactionvalue", value);

            ProducerRecord<Object, Object> record = new ProducerRecord<>(topicname, key, avroRecord);
            producer.send(record);
            producer.close();
        }
    }
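
To illustrate the symptom with the placeholder names from the sketch above: once the trigger is attached, every row comes back with a null "value" column, roughly like this:

    SELECT id, subtype, type, value FROM mykeyspace.transactions;

    --  id  | subtype  | type  | value
    -- -----+----------+-------+-------
    --  tx1 | transfer | debit |  null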

0 Answers