forked from scottstamp/PokemonGo-Map
-
Notifications
You must be signed in to change notification settings - Fork 0
/
search.py
290 lines (236 loc) · 10.9 KB
/
search.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Search Architecture:
- Have a list of accounts
- Create an "overseer" thread
- Search Overseer:
- Tracks incoming new location values
- Tracks "paused state"
- During pause or new location will clears current search queue
- Starts search_worker threads
- Search Worker Threads each:
- Have a unique API login
- Listens to the same Queue for areas to scan
- Can re-login as needed
- Shares a global lock for map parsing
'''
import logging
import time
import math
import platform
import threading
from threading import Thread, Lock
from queue import Queue, Empty
from pgoapi import PGoApi
from pgoapi.utilities import f2i
from pgoapi import utilities as util
from pgoapi.exceptions import AuthException
from . import config
from .models import parse_map
log = logging.getLogger(__name__)
TIMESTAMP = '\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000'
def get_new_coords(init_loc, distance, bearing):
    """Project a point a given great-circle distance along a bearing.

    init_loc is a [lat, lng] pair in degrees, distance is in kilometres,
    bearing is in degrees (0 = north, 90 = east).  Returns the destination
    as a [lat, lng] pair in degrees.
    """
    EARTH_RADIUS_KM = 6378.1
    theta = math.radians(bearing)
    lat1 = math.radians(init_loc[0])
    lng1 = math.radians(init_loc[1])
    angular = distance / EARTH_RADIUS_KM  # distance expressed as an angle

    lat2 = math.asin(math.sin(lat1) * math.cos(angular) +
                     math.cos(lat1) * math.sin(angular) * math.cos(theta))
    lng2 = lng1 + math.atan2(math.sin(theta) * math.sin(angular) * math.cos(lat1),
                             math.cos(angular) - math.sin(lat1) * math.sin(lat2))
    return [math.degrees(lat2), math.degrees(lng2)]
def generate_location_steps(initial_loc, step_count):
    """Yield (lat, lng, alt) scan points spiralling out from initial_loc.

    Points are laid out as concentric hexagonal rings spaced so that the
    70 m "heartbeat" circles tile the area; ring 1 is the first hexagon
    around the centre, and rings continue up to step_count - 1.
    """
    # Bearings in degrees
    NORTH, EAST, SOUTH, WEST = 0, 90, 180, 270

    pulse_radius = 0.07                     # km - player heartbeat radius (70 m)
    xdist = math.sqrt(3) * pulse_radius     # spacing between column centres
    ydist = 3 * (pulse_radius / 2)          # spacing between row centres

    # Centre point first.
    yield (initial_loc[0], initial_loc[1], 0)

    # For each of the six hexagon edges: the sequence of moves that steps
    # one cell along that edge (right, down-right, down-left, left,
    # up-left, up-right).
    edge_moves = (
        ((xdist, EAST),),
        ((ydist, SOUTH), (xdist / 2, EAST)),
        ((ydist, SOUTH), (xdist / 2, WEST)),
        ((xdist, WEST),),
        ((ydist, NORTH), (xdist / 2, WEST)),
        ((ydist, NORTH), (xdist / 2, EAST)),
    )

    loc = initial_loc
    for ring in range(1, step_count):
        # Hop from the previous ring up to this ring's top-left corner.
        loc = get_new_coords(loc, ydist, NORTH)
        loc = get_new_coords(loc, xdist / 2, WEST)
        for moves in edge_moves:
            for _ in range(ring):           # each edge is `ring` cells long
                for dist, bearing in moves:
                    loc = get_new_coords(loc, dist, bearing)
                yield (loc[0], loc[1], 0)
#
# A fake search loop which does....nothing!
#
def fake_search_loop():
    """Stand-in search loop: does no searching, just logs a heartbeat
    message every 10 seconds, forever."""
    while True:
        log.info('Fake search loop running')
        time.sleep(10)
# The main search loop that keeps an eye on the over all process
def _clear_queue(queue):
    """Drain every pending item from *queue* without blocking."""
    try:
        while True:
            queue.get_nowait()
    except Empty:
        pass


def search_overseer_thread(args, new_location_queue, pause_bit):
    """Main search loop: spawns one worker per account and keeps the
    shared search queue filled.

    Tracks the most recent location from new_location_queue, clears the
    work queue on pause or relocation, and re-queues the full scan grid
    whenever the workers finish a sweep.  Runs forever.
    """
    log.info('Search overseer starting')

    search_items_queue = Queue()
    parse_lock = Lock()

    # Create a search_worker_thread per account
    log.info('Starting search worker threads')
    for i, account in enumerate(args.accounts):
        log.debug('Starting search worker thread %d for user %s', i, account['username'])
        t = Thread(target=search_worker_thread,
                   name='search_worker_{}'.format(i),
                   args=(args, account, search_items_queue, parse_lock))
        t.daemon = True
        t.start()

    # The most recent location received; None until the first one arrives.
    current_location = None

    # The real work starts here but will halt on pause_bit.set()
    while True:
        # Paused: drop any queued work so workers idle, then wait.
        if pause_bit.is_set():
            _clear_queue(search_items_queue)
            time.sleep(1)
            continue

        # If a new location has been passed to us, keep only the most recent.
        if not new_location_queue.empty():
            log.info('New location caught, moving search grid')
            try:
                while True:
                    current_location = new_location_queue.get_nowait()
            except Empty:
                pass
            # Queued steps belong to the old grid; discard them.
            _clear_queue(search_items_queue)

        # If the queue is empty the sweep has finished (or was cleared
        # above) -- refill it.  Until a first location arrives there is
        # nothing to queue; the original code passed its `False` sentinel
        # into generate_location_steps and crashed this thread.
        if current_location is not None and search_items_queue.empty():
            log.debug('Search queue empty, restarting loop')
            for step, step_location in enumerate(generate_location_steps(current_location, args.step_limit), 1):
                log.debug('Queueing step %d @ %f/%f/%f', step, step_location[0], step_location[1], step_location[2])
                search_items_queue.put((step, step_location))

        # Now we just give a little pause here
        time.sleep(1)
def search_worker_thread(args, account, search_items_queue, parse_lock):
    """Worker thread: owns one API login and consumes scan steps.

    Pulls (step, location) items from search_items_queue, scans each with
    up to args.scan_retries attempts (with growing back-off), and parses
    results under the shared parse_lock.  Runs forever; any unexpected
    exception is logged and the loop restarts with a fresh PGoApi.
    """
    log.debug('Search worker thread starting')

    # The forever loop for the thread
    while True:
        try:
            log.debug('Entering search loop')

            # Create the API instance this thread will use
            api = PGoApi()

            # The forever loop for the searches
            while True:
                # Grab the next thing to search (when available)
                step, step_location = search_items_queue.get()
                log.info('Search step %d beginning (queue size is %d)', step, search_items_queue.qsize())

                # Let the api know where we intend to be for this loop
                api.set_position(*step_location)

                # The loop to try very hard to scan this step
                failed_total = 0
                while True:
                    # After so many attempts, let's get out of here
                    if failed_total >= args.scan_retries:
                        # I am choosing to NOT place this item back in the queue
                        # otherwise we could get a "bad scan" area and be stuck
                        # on this overall loop forever. Better to lose one cell
                        # than have the scanner, essentially, halt.
                        log.error('Search step %d went over max scan_retries; abandoning', step)
                        break

                    # Increase sleep delay between each failed scan.
                    # With the defaults (scan_delay=5, scan_retries=5) the
                    # waits are 5, 10, 15, 20, 25 seconds.
                    sleep_time = args.scan_delay * (1 + failed_total)

                    # Ok, let's get started -- check our login status
                    check_login(args, account, api, step_location)

                    # Make the actual request (finally!)
                    response_dict = map_request(api, step_location)

                    # Nothing back. Mark it up, sleep, carry on
                    if not response_dict:
                        log.error('Search step %d area download failed, retrying request in %g seconds', step, sleep_time)
                        failed_total += 1
                        time.sleep(sleep_time)
                        continue

                    # Got the response: parse under the shared lock, but
                    # record the outcome and sleep OUTSIDE the lock -- the
                    # previous version slept while holding parse_lock,
                    # stalling every other worker on a parse failure.
                    with parse_lock:
                        try:
                            parse_map(response_dict, step_location)
                            parse_ok = True
                        except KeyError:
                            parse_ok = False

                    if parse_ok:
                        log.debug('Search step %d completed', step)
                        search_items_queue.task_done()
                        break  # All done, get out of the request-retry loop

                    log.error('Search step %d map parsing failed, retrying request in %g seconds', step, sleep_time)
                    failed_total += 1
                    time.sleep(sleep_time)

                time.sleep(args.scan_delay)

        # catch any process exceptions, log them, and continue the thread
        except Exception as e:
            log.exception('Exception in search_worker: %s', e)
def check_login(args, account, api, position):
    """Ensure *api* holds a valid auth ticket, logging in when needed.

    Returns immediately if the current ticket has more than 60 seconds
    left.  Otherwise retries login up to args.login_retries times and
    raises TooManyLoginAttempts when they are exhausted.
    """
    # Logged in? Enough time left? Cool!
    if api._auth_provider and api._auth_provider._ticket_expire:
        remaining_time = api._auth_provider._ticket_expire / 1000 - time.time()
        if remaining_time > 60:
            log.debug('Credentials remain valid for another %f seconds', remaining_time)
            return

    # Try to login (a few times, but don't get stuck here)
    api.set_position(position[0], position[1], position[2])
    for attempt in range(1, args.login_retries + 1):
        try:
            api.set_authentication(provider=account['auth_service'],
                                   username=account['username'],
                                   password=account['password'])
            break
        except AuthException:
            # Bug fix: the old `if i >= args.login_retries` sat inside a
            # `while i < args.login_retries` loop and was unreachable, so
            # the limit was never enforced and the code carried on
            # unauthenticated after exhausting its retries.
            if attempt >= args.login_retries:
                raise TooManyLoginAttempts('Exceeded login attempts')
            log.error('Failed to login to Pokemon Go with account %s. Trying again in %g seconds', account['username'], args.login_delay)
            time.sleep(args.login_delay)

    # Raw strings keep the backslashes literal (same values as before,
    # without relying on Python passing unknown escapes through).
    if platform.system() == 'Windows':
        api.activate_signature(r".\pogom\encrypt.dll")
    else:
        api.activate_signature(r".\pogom\libencrypt.so")
    log.debug('Login for account %s successful', account['username'])
def map_request(api, position):
    """Issue one get_map_objects request for *position* (lat, lng, alt).

    Returns the raw response dict on success, or False if anything went
    wrong while downloading (the failure is logged, not raised).
    """
    lat, lng = position[0], position[1]
    try:
        cell_ids = util.get_cell_ids(lat, lng)
        return api.get_map_objects(latitude=f2i(lat),
                                   longitude=f2i(lng),
                                   since_timestamp_ms=[0] * len(cell_ids),
                                   cell_id=cell_ids)
    except Exception as e:
        log.warning('Exception while downloading map: %s', e)
        return False
class TooManyLoginAttempts(Exception):
    """Raised when login retries for an account have been exhausted."""