get_metadata.py

# AKASH SHARMA-THE-GREAT
import tweepy
import json
import math
import csv
import zipfile
from time import sleep

# CHANGE THIS TO THE USER YOU WANT
user = 'narendramodi'

# Load Twitter API credentials and authenticate with tweepy.
with open('api_keys.json') as f:
    keys = json.load(f)

auth = tweepy.OAuthHandler(keys['consumer_key'], keys['consumer_secret'])
auth.set_access_token(keys['access_token'], keys['access_token_secret'])
api = tweepy.API(auth)
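
# The block above assumes an api_keys.json file next to this script. A minimal
# sketch of its layout, inferred from the keys read here (values are
# placeholders, not real credentials):
#
#   {
#     "consumer_key": "XXXX",
#     "consumer_secret": "XXXX",
#     "access_token": "XXXX",
#     "access_token_secret": "XXXX"
#   }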

user = user.lower()
output_file = '{}.json'.format(user)
output_file_short = '{}_short.json'.format(user)
compression = zipfile.ZIP_DEFLATED

# Load the list of tweet IDs to hydrate.
with open('all_ids.json') as f:
    ids = json.load(f)

print('total ids: {}'.format(len(ids)))
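
# all_ids.json is assumed to be a flat JSON array of tweet IDs gathered
# beforehand, which is what the slicing and statuses_lookup calls below expect,
# e.g.:
#
#   ["1100000000000000001", "1100000000000000002", "1100000000000000003"]
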
all_data = []
start = 0
end = 100
limit = len(ids)
i = math.ceil(limit / 100)  # number of 100-ID batches to request

# Hydrate the IDs in batches of 100, the maximum statuses_lookup accepts per call.
for go in range(i):
    print('currently getting {} - {}'.format(start, end))
    sleep(6)  # needed to prevent hitting the API rate limit
    id_batch = ids[start:end]
    start += 100
    end += 100
    tweets = api.statuses_lookup(id_batch)
    for tweet in tweets:
        all_data.append(dict(tweet._json))  # keep the raw JSON payload of each tweet

print('metadata collection complete')

# Write the full metadata dump, then a zip-compressed copy of it.
print('creating master json file')
with open(output_file, 'w') as outfile:
    json.dump(all_data, outfile)

print('creating zipped master json file')
zf = zipfile.ZipFile('{}.zip'.format(user), mode='w')
zf.write(output_file, compress_type=compression)
zf.close()

results = []


def is_retweet(entry):
    # A tweet counts as a retweet if its payload carries a 'retweeted_status' object.
    return 'retweeted_status' in entry.keys()


def get_source(entry):
    # The 'source' field is usually an HTML anchor tag; keep only its inner text.
    if '<' in entry["source"]:
        return entry["source"].split('>')[1].split('<')[0]
    else:
        return entry["source"]
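
# For illustration, get_source turns the anchor-tag form of the 'source' field
# into plain text, e.g. (hypothetical input):
#
#   get_source({"source": '<a href="http://twitter.com/download/android" rel="nofollow">Twitter for Android</a>'})
#   -> 'Twitter for Android'
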
with open(output_file) as json_data:
    data = json.load(json_data)
    for entry in data:
        t = {
            "created_at": entry["created_at"],
            "text": entry["text"],
            "in_reply_to_screen_name": entry["in_reply_to_screen_name"],
            "retweet_count": entry["retweet_count"],
            "favorite_count": entry["favorite_count"],
            "source": get_source(entry),
            "id_str": entry["id_str"],
            "is_retweet": is_retweet(entry)
        }
        results.append(t)

print('creating minimized json master file')
with open(output_file_short, 'w') as outfile:
    json.dump(results, outfile)

with open(output_file_short) as master_file:
    data = json.load(master_file)

fields = ["favorite_count", "source", "text", "in_reply_to_screen_name", "is_retweet", "created_at", "retweet_count", "id_str"]

print('creating CSV version of minimized json master file')
# newline='' stops csv.writer from inserting blank rows on Windows, utf-8 keeps
# non-ASCII tweet text from raising encoding errors, and the with-block closes the file.
with open('{}.csv'.format(user), 'w', newline='', encoding='utf-8') as csv_file:
    writer = csv.writer(csv_file)
    writer.writerow(fields)
    for x in data:
        writer.writerow([x["favorite_count"], x["source"], x["text"], x["in_reply_to_screen_name"], x["is_retweet"], x["created_at"], x["retweet_count"], x["id_str"]])
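
# Hypothetical downstream usage (not part of this script): the CSV written
# above can be read back with the standard library's csv.DictReader, e.g.
#
#   import csv
#   with open('narendramodi.csv', newline='', encoding='utf-8') as csv_in:
#       for row in csv.DictReader(csv_in):
#           print(row['created_at'], row['text'])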