First, I started ZooKeeper, then started the Kafka server and created a topic. I then read tweets from the Twitter stream and tried to store them in MongoDB. Although the server accepts the connection, nothing is ever written to my collection.
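For reference, the setup steps above correspond to commands along these lines (assuming a default Kafka installation; on Windows the equivalent .bat scripts live under bin\windows, and the topic name is just a placeholder):

bin/zookeeper-server-start.sh config/zookeeper.properties
bin/kafka-server-start.sh config/server.properties
bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic <topic-name>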
Here is the Java code:
package kafka;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.LinkedBlockingQueue;
import twitter4j.FilterQuery;
import twitter4j.StallWarning;
import twitter4j.Status;
import twitter4j.StatusDeletionNotice;
import twitter4j.StatusListener;
import twitter4j.TwitterObjectFactory;
import twitter4j.TwitterStream;
import twitter4j.TwitterStreamFactory;
import twitter4j.conf.ConfigurationBuilder;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.bson.Document;
import org.mortbay.util.ajax.JSON;
import com.mongodb.*;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
public class KafkaTwitterProducer {
@SuppressWarnings("resource")
public static void main(String[] args) throws Exception {
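// In-memory buffer for statuses coming from the Twitter stream; the listener below fills it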
final LinkedBlockingQueue<Status> queue = new LinkedBlockingQueue<Status>(1000);
if (args.length < 6) {
System.out.println(
"Usage: KafkaTwitterProducer <twitter-consumer-key> <twitter-consumer-secret> <twitter-access-token> <twitter-access-token-secret> <topic-name> <twitter-search-keywords>");
return;
}
String consumerKey = args[0];
String consumerSecret = args[1];
String accessToken = args[2];
String accessTokenSecret = args[3];
String topicName = args[4];
String[] arguments = args.clone();
String[] keyWords = Arrays.copyOfRange(arguments, 5, arguments.length);
// Set twitter oAuth tokens in the configuration
ConfigurationBuilder cb = new ConfigurationBuilder();
cb.setDebugEnabled(true).setOAuthConsumerKey(consumerKey).setOAuthConsumerSecret(consumerSecret)
.setOAuthAccessToken(accessToken).setOAuthAccessTokenSecret(accessTokenSecret).setJSONStoreEnabled(true);
// Create twitterstream using the configuration
TwitterStream twitterStream = new TwitterStreamFactory(cb.build()).getInstance();
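// Listener that pushes every incoming status onto the queue; the loop at the end of main reads from it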
StatusListener listener = new StatusListener() {
public void onStatus(Status status) {
queue.offer(status);
}
public void onDeletionNotice(StatusDeletionNotice statusDeletionNotice) {
System.out.println("Got a status deletion notice id:" + statusDeletionNotice.getStatusId());
}
public void onTrackLimitationNotice(int numberOfLimitedStatuses) {
System.out.println("Got track limitation notice:" + numberOfLimitedStatuses);
}
public void onScrubGeo(long userId, long upToStatusId) {
System.out.println("Got scrub_geo event userId:" + userId + "upToStatusId:" + upToStatusId);
}
public void onStallWarning(StallWarning warning) {
System.out.println("Got stall warning:" + warning);
}
public void onException(Exception ex) {
ex.printStackTrace();
}
};
twitterStream.addListener(listener);
// Filter keywords
FilterQuery query = new FilterQuery().track(keyWords);
twitterStream.filter(query);
// Thread.sleep(5000);
// Add Kafka producer config settings
Properties props = new Properties();
props.put("metadata.broker.list", "localhost:9092");
props.put("bootstrap.servers", "localhost:9092");
props.put("acks", "all");
props.put("retries", 0);
props.put("batch.size", 16384);
props.put("linger.ms", 1);
props.put("buffer.memory", 33554432);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
Producer<String, String> producer = new KafkaProducer<String, String>(props);
// Mongodb initialization parameters.
int port_no = 27017;
String host_name = "localhost", db_name = "bigdata", db_coll_name = "tweets";
// Mongodb connection string.
String client_url = "mongodb://" + host_name + ":" + port_no + "/" + db_name;
MongoClientURI uri = new MongoClientURI(client_url);
// Connecting to the mongodb server using the given client uri.
MongoClient mongo_client = new MongoClient(uri);
// Fetching the database from the mongodb.
MongoDatabase db = mongo_client.getDatabase(db_name);
// Fetching the collection from the mongodb.
MongoCollection<Document> coll = db.getCollection(db_coll_name);
// Read the tweets currently in the queue and insert each one into MongoDB
for(Status status:queue) {
String json = TwitterObjectFactory.getRawJSON(status);
BasicDBObject dbObject = (BasicDBObject) JSON.parse(json);
coll.insertOne(new Document(dbObject));
}
}
}
This is what the mongod command prompt shows me:
2019-05-26T19:26:29.181+0300 I NETWORK [initandlisten] waiting for connections on port 27017
2019-05-26T19:28:05.466+0300 I NETWORK [listener] connection accepted from 127.0.0.1:54232 #1 (1 connection now open)
2019-05-26T19:28:05.475+0300 I NETWORK [conn1] received client metadata from 127.0.0.1:54232 conn1: { driver: { name: "mongo-java-driver", version: "3.5.0" }, os: { type: "Windows", name: "Windows 10", architecture: "amd64", version: "10.0" }, platform: "Java/Oracle Corporation/1.8.0_152-b16" }
but no tweets are ever written to the database. Can someone explain where the error is?
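For comparison, here is a minimal sketch of a loop that blocks on the queue and keeps running as tweets arrive. It is only an illustration, not necessarily a complete fix: it reuses queue, coll, producer and topicName from the code above, the document field names are my own choice, and the Kafka send line is optional.

// Block on the queue so the loop keeps running as new tweets arrive,
// and build the document from Status fields rather than the raw JSON string.
while (true) {
    Status status = queue.take(); // waits until a tweet is available
    Document doc = new Document("id", status.getId())
            .append("user", status.getUser().getScreenName())
            .append("text", status.getText())
            .append("createdAt", status.getCreatedAt());
    coll.insertOne(doc);
    producer.send(new ProducerRecord<String, String>(topicName, status.getText()));
}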