api_limit_status().remaining_hits KeyError; script not working; cannot get friends

Time: 2013-04-03 07:43:17

Tags: python twitter tweepy

#!/usr/bin/env python
# encoding: utf-8


import tweepy
import networkx 
from networkx import core
import time
import sys
import json
import codecs
import os

consumer_key=""
consumer_secret=""
access_token=""
access_token_secret=""


auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)

api = tweepy.API(auth)


AN_HOUR = 3630
TWENTY_MIN = 20*60
THIRTY_MIN = 30*60
LIMIT = 500

def create_egonet(twitter_api, seed_user):
    try:
        egonet = networkx.DiGraph()
        if (api.rate_limit_status()['remaining_hits'] < 5):
            print(api.rate_limit_status())
            time.sleep(TWENTY_MIN)

        friends = tweepy.Cursor(api.friends, id=seed_user).items(LIMIT)
        print "processing user" + str(seed_user)
        count = 0
        for fr in friends:
            count = count + 1
            egonet.add_edge(seed_user, str(fr.screen_name))
        print str(count) + ' edges added'
        return egonet
    except tweepy.error.TweepError as e:
        print e.reason
        return False


def build_friends_of_friends_network(twitter_api, cur_network):
    min_degree = 1
    users = nodes_at_degree(cur_network, min_degree)
    for user in users:
        time.sleep(1)
        new_network = create_egonet(twitter_api, user)
        if new_network != False:
            cur_network = networkx.compose(cur_network, new_network)
            print(str(networkx.info(cur_network)))
    return cur_network

def nodes_at_degree(network, min_degree):
    d = network.degree()
    d = d.items()
    return [(user) for (user,degree) in d if (degree >= min_degree)]

def save_network(network, original_seed):
    network._name = "original_seed_network"
    networkx.write_edgelist(network, path="twitter_network_edgelist.csv", delimiter='\t')


def core_the_network(new_network, k):
    G = new_network
    kcores = core.find_cores(G)
    core_items = kcores.items()
    two_core_or_higher = [(a) for (a,b) in core_items if b>(k-1)]
    K = networkx.subgraph(G, two_core_or_higher)
    print(str(networkx.info(K)))
    return K



def find_core_friends(cored_network, original_seed):
    K = cored_network
    # get your current friends from K
    N = K.neighbors(original_seed)
    F = []
    # for each of your current friends, gather their friends.  There hopefully will be repeats.
    for u in N:
        D = K.neighbors(u)
        for i in D:
            if N.count(i) < 1 and i is not original_seed:
                F.append(i)
    return F


def order_new_friends(friend_counts):
    F = friend_counts
    counts = dict((k, F.count(k)) for k in set(F))
    counts_items=counts.items()
    counts_items=[(b,a) for (a,b) in counts_items]
    counts_items.sort()
    counts_items.reverse()
    ff = open('suggested_friends', 'w')
    for i in counts_items:
        ff.write(str(i[0])+' of your friends are already following '+i[1] + '\n')
        print(str(i[0])+' of your friends are already following '+i[1])


from operator import itemgetter    
def find_highest_degree_friends(K):
    items = K.in_degree().items()
    items.sort(key=itemgetter(1))
    items.reverse()
    print(str(items[0:20]))

def highest_centrality(cent_dict):
    cent_items=[(b,a) for (a,b) in cent_dict.iteritems()]
    cent_items.sort()
    cent_items.reverse()
    return tuple(reversed(cent_items[0:20]))

Run in the terminal:

import TweetRecs as tr
import networkx as nx
main_net = tr.create_egonet(tr.api, 'gilad')
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "TweetRecs.py", line 59, in create_egonet
    if (api.rate_limit_status()['remaining_hits'] < 5):
KeyError: 'remaining_hits'

1 Answer:

Answer 0 (score: 5):

I ran into the same problem and could not find any help on Stack Overflow. After some experimenting I was able to figure it out anyway, and thought I would post an answer to this question even though I am two months late :-) api.rate_limit_status()['remaining_hits'] no longer works because Twitter changed its rate limiting. The Twitter API limits have become stricter: it is no longer 350 calls for the REST/Search API, plus an unknown number (but higher than 350) of Streaming API calls.
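
To see what the new response actually contains, a quick sketch (using the standard json module for pretty-printing, and the same authenticated api object as in the question's script) is to dump the whole dictionary; the old flat keys such as remaining_hits are gone and everything now sits under 'resources':

import json

# Pretty-print the full rate-limit response to inspect the new layout:
# limits are nested under 'resources', grouped by API family and then
# by endpoint path, each with 'limit', 'remaining' and 'reset'.
print(json.dumps(api.rate_limit_status(), indent=2, sort_keys=True))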

The limits are now applied per resource, and the limit is 15 or 180 depending on which resource you query. Read the rate limit by resource documentation for more details. So, if you want to find your remaining search limit, you can issue the following commands.

limits = api.rate_limit_status()
remain_search_limits = limits['resources']['search']['/search/tweets']['remaining']
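
The script in the question pages through api.friends rather than search, so the resource to watch there is the friends/list endpoint (which is what tweepy's api.friends should call under API v1.1; double-check for your tweepy version). A rough sketch of the adapted check, using a hypothetical helper name:

def remaining_friends_calls(twitter_api):
    # Look up the remaining calls for the friends/list endpoint,
    # the resource consumed when cursoring over api.friends.
    limits = twitter_api.rate_limit_status()
    return limits['resources']['friends']['/friends/list']['remaining']

# For example, inside create_egonet, instead of the old remaining_hits check:
# if remaining_friends_calls(api) < 5:
#     time.sleep(TWENTY_MIN)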