Datastore contention errors

Date: 2017-12-13 15:44:49

Tags: node.js google-app-engine google-cloud-datastore google-cloud-pubsub

Error: too much contention on these datastore entities. please try again.
at /Users/wgosse/Documents/data-transfer-request/node_modules/grpc/src/node/src/client.js:554:15 code: 409, metadata: Metadata { _internal_repr: {} }

We're trying to set up a system in which a Node event listener pulls messages from a Pub/Sub queue and uses them to update Datastore entities as they arrive. Unfortunately, we run into contention errors when too many messages are pulled off at once. Normally we would batch these requests, but having this code inside an event listener makes that hard to implement. Is there any way to eliminate these errors other than batching?

The entities we're trying to update do have a shared ancestor, in case that's relevant.
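To illustrate the entity-group structure, here is a minimal sketch of what such a key looks like, with hypothetical kind names ('Transfer', 'Filepath') standing in for the real parentKind and kind used in the update method further down:

// Hypothetical kinds: every 'Filepath' keyed under the same 'Transfer'
// parent lives in one entity group, so all of these updates compete for
// the same group's write throughput.
const filepathKey = ds.key([
  'Transfer', parseInt(transferId, 10), // shared ancestor
  'Filepath', parseInt(filepathId, 10)  // entity being updated
]);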

listenForMessages establishes the event listener and shows the callback with the update and acknowledgement logic.

// Start listener to wait for return messages
pubsub_model.listenForMessages((message) => {
  filepath_ctrl.updateFromSub(
    message.attributes,
    (err, data) => {
      if (err) {
        console.log('PUBSUB: Unable to update filepath entity. Error message: ', err);
        return false;
      }
      console.log('PUBSUB: Filepath entity updated.');

      // "Ack" (acknowledge receipt of) the message
      message.ack();
      return data;
    }
  );
});

/**
 * Establishes an event listener to receive return messages post processing
 * @param {Function} messageCallback
 */
function listenForMessages(messageCallback) {
  pubsubConnect(
    0, // initial retry count consumed by pubsubConnect
    return_topic,
    config.get('PUBSUB_RECIEVE_TOPIC'),
    return_sub,
    config.get('PUBSUB_RECIEVE_SUB'),
    (err) => {
      if (err) {
        console.log('PUBSUB: ERROR: Error encountered while attempting to establish listening connection: ', err);
        return false;
      }
      console.log('PUBSUB: Listening for messages...');
      // Function for handling messages
      const msgHandlerConstruct = (message) => {
        messageHandler(messageCallback, message);
      };
      const errHandler = (puberr) => {
        console.log('PUBSUB: ERROR: Error encountered when listening for messages: ', puberr);
      };

      return_sub.on('message', msgHandlerConstruct);
      return_sub.on('error', errHandler);

      return true;
    }
  );
  return true;
}
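pubsubConnect itself isn't shown. One plausible shape for it, assuming the 2017-era @google-cloud/pubsub callback API and module-level return_topic/return_sub variables — the names and structure here are guesses for context, not the actual implementation:

// Sketch only: ensure the topic and subscription exist before listening.
// return_topic and return_sub are assumed to be module-level variables that
// listenForMessages reads once this callback fires; retries, topic and sub
// are accepted only to match the call site above.
const pubsub = require('@google-cloud/pubsub')({
  projectId: config.get('GCLOUD_PROJECT')
});

function pubsubConnect(retries, topic, topicName, sub, subName, cb) {
  pubsub.topic(topicName).get({ autoCreate: true }, (topicErr, gotTopic) => {
    if (topicErr) { return cb(topicErr); }
    return_topic = gotTopic;
    return_topic.subscription(subName).get({ autoCreate: true }, (subErr, gotSub) => {
      if (subErr) { return cb(subErr); }
      return_sub = gotSub;
      return cb(null);
    });
  });
}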

/**
 * Business logic for processing return messages.  Upserts the message into the datastore as a filepath.
 * @param {Function} callback
 * @param {object} message
 */
function messageHandler(callback, message) {
  console.log(`PUBSUB: Received message ${message.id}:`);
  console.log(`\tData: ${message.data}`);
  console.log(`\tAttributes: ${JSON.stringify(message.attributes)}`);
  // Datastore update logic

  // Callback MUST acknowledge after error detection
  callback(message);
}

updateFromSub takes the message, structures its attributes into an entity to be saved to the Datastore, and then calls our update method.

/**
 * Gets the entity to be updated and updates anything that's changed in the message
 * @param {object} msg_attributes
 * @param {Function} cb
 */
module.exports.updateFromSub = function (msg_attributes, cb) {
  if (msg_attributes.id && msg_attributes.transfer_id) {
    filepath_model.read(msg_attributes.id, msg_attributes.transfer_id, (err, entity) => {
      if (err) {
        return cb(err);
      }
      writeUpdateToOject(entity, msg_attributes, (obj_err, updated_entity) => {
        if (obj_err) {
          return cb(obj_err); // propagate obj_err; the outer err is null here
        }
        filepath_model.update(msg_attributes.id, msg_attributes.transfer_id, updated_entity, cb);
        return true;
      });
      return true;
    });
  } else {
    cb(`Message missing id and/or transfer id. Message: ${JSON.stringify(msg_attributes)}`);
    return false;
  }
  return true;
};

The update method comes from the GCP tutorials, but has been modified to accommodate the parent-child relationship.

const Datastore = require('@google-cloud/datastore');
const ds = Datastore({
  projectId: config.get('GCLOUD_PROJECT')
});
function update (id, parentId, data, cb) {
  let key;
  if (id) {
    key = ds.key([parentKind,
      parseInt(parentId, 10),
      kind,
      parseInt(id, 10)]);
  } else {
    key = ds.key([parentKind,
      parseInt(parentId, 10),
      kind]);
  }

  const entity = {
    key: key,
    data: toDatastore(data, ['description'])
  };

  ds.save(
    entity,
    (err) => {
      data.id = entity.key.id;
      cb(err, err ? null : data);
    }
  );
}
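The snippet above also calls toDatastore without showing it; in the GCP Node.js getting-started samples it is essentially the following helper, reproduced here for context (your local copy may differ):

// Converts a plain object into the [{ name, value, excludeFromIndexes }]
// array format that ds.save accepts, skipping undefined values and
// excluding the listed properties from indexes.
function toDatastore (obj, nonIndexed) {
  nonIndexed = nonIndexed || [];
  const results = [];
  Object.keys(obj).forEach((k) => {
    if (obj[k] === undefined) {
      return;
    }
    results.push({
      name: k,
      value: obj[k],
      excludeFromIndexes: nonIndexed.indexOf(k) !== -1
    });
  });
  return results;
}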

1 Answer:

Answer 0 (score: 1)

You are hitting the limit on writes per second to the same entity group, which is 1 write per second by default.

See the Datastore limits table:

https://cloud.google.com/datastore/docs/concepts/limits

It seems Pub/Sub is delivering messages at too high a rate for Datastore to write them one by one within this limit. What you could try is polling the Pub/Sub subscription, collecting a set of updates, and writing them in a single batch.
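A minimal sketch of that batching approach, assuming the ds client and the key-building logic from the update function in the question; the flush interval and the enqueueUpdate helper are illustrative choices, not a prescribed implementation:

// Buffer entities as messages arrive, then flush them in one save() call
// per interval so the entity group sees one commit per second rather than
// one commit per message. Messages are acked only after the batch commits.
const FLUSH_INTERVAL_MS = 1000; // roughly the 1 write/sec entity-group limit
let pending = [];

function enqueueUpdate(entity, message) {
  pending.push({ entity, message });
}

setInterval(() => {
  if (pending.length === 0) { return; }
  const batch = pending;
  pending = [];
  // ds.save accepts an array of entities and commits them together
  ds.save(batch.map((item) => item.entity), (err) => {
    if (err) {
      // Don't ack on failure; Pub/Sub will redeliver the messages
      console.log('PUBSUB: Batched save failed: ', err);
      return;
    }
    batch.forEach((item) => item.message.ack());
  });
}, FLUSH_INTERVAL_MS);

Note that a single Datastore commit is capped at 500 mutations (see the limits page above), so a very large buffer would need to be flushed in chunks.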