diff --git a/README-CN.md b/README-CN.md
index 74a3f309f88..d7a5c1e6cb4 100644
--- a/README-CN.md
+++ b/README-CN.md
@@ -652,7 +652,7 @@ python3(python) run_page/garmin_sync_cn_global.py ${garmin_cn_secret_string} ${g
-> Please note: since Nike Run Club has stopped operating in mainland China, you can only log in through a vpn. Before you start, confirm that you are on a global (non-mainland-China) proxy and can correctly reach `nike.com` rather than `nike.com.cn`, as shown below.
+> Please note: since Nike Run Club has stopped operating in mainland China, you can only log in through a VPN. Before you start, confirm that you are on a global (non-mainland-China) proxy and can correctly reach `nike.com` rather than `nike.com.cn`, as shown below.
![nike.com](https://github.com/user-attachments/assets/8ce6ae8f-4bc6-4522-85ec-3e5b7590e96d)
@@ -661,13 +661,17 @@ python3(python) run_page/garmin_sync_cn_global.py ${garmin_cn_secret_string} ${g
![login](https://github.com/user-attachments/assets/659341fb-4abf-491e-bda7-bfca968921b3)
2. After logging in successfully, open the developer tools (F12) -> Application -> Local Storage -> copy the `access_token` value from the entry whose key is `https://www.nike.com`.
![developer_mode](https://github.com/user-attachments/assets/c932318d-a123-4505-8fd8-b46946c25d29)
-3. Modify the code in the `nike_sync.py` file and paste in the content you just copied, as shown below.
- ![nike_sync.py](https://github.com/user-attachments/assets/6de91725-ed17-4338-96d5-727eb75cee24)
-4. Run from the root directory; you should see the output shown below, and you can then log in to your account in the mobile NRC app as usual:
+3. Run from the root directory, passing the token you just copied; you should see the output shown below, and you can then log in to your account in the mobile NRC app as usual:
```bash
-python3(python) run_page/nike_sync.py aaaaa
+python3(python) run_page/nike_sync.py ${access_token}
```
+
+If you have already completed a sync and want to pick up only the new activities, add the `--continue-sync` flag:
+```bash
+python3(python) run_page/nike_sync.py ${access_token} --continue-sync
+```
+
![tg_image_166091873](https://github.com/user-attachments/assets/9d4851d6-849a-4bb7-8ffe-5358fa7328b2)
If you want to automate syncing your NRC workout data, see [issue692](https://github.com/yihong0618/running_page/issues/692#issuecomment-2218849713).
diff --git a/README.md b/README.md
index b656dd9790e..70d78aebf45 100644
--- a/README.md
+++ b/README.md
@@ -454,18 +454,21 @@ python3(python) run_page/garmin_sync_cn_global.py ${garmin_cn_secret_string} ${g
1. Sign in to or sign up for a [NikeRunClub](https://www.nike.com/) account
![login](https://github.com/user-attachments/assets/659341fb-4abf-491e-bda7-bfca968921b3)
2. After successful login, open the developer tools (F12) -> Application -> Local Storage -> copy the `access_token` value from the entry whose key is `https://www.nike.com`.
-![developer_mode](https://github.com/user-attachments/assets/c932318d-a123-4505-8fd8-b46946c25d29)
-3. modify the code in the "nike_sync.py" file and paste the content you just copied as shown in the following image.
-![nike_sync.py](https://github.com/user-attachments/assets/6de91725-ed17-4338-96d5-727eb75cee24)
-4. Execute in the root directory , you should be able to see the image below, and then you can log into your account on the mobile as usual:
+3. Execute in the root directory, passing the token as the first argument; you should see the output below, and you can then log in to your account in the mobile NRC app as usual:
```bash
-python3(python) run_page/nike_sync.py aaaaa
+python3(python) run_page/nike_sync.py ${access_token}
```
![tg_image_166091873](https://github.com/user-attachments/assets/9d4851d6-849a-4bb7-8ffe-5358fa7328b2)
If you want to automate the submission of NRC data, you can refer to [issue692](https://github.com/yihong0618/running_page/issues/692#issuecomment-2218849713).
+If you've previously synced activities and want to fetch only the new ones, pass the `--continue-sync` flag:
+
+```bash
+python3(python) run_page/nike_sync.py ${access_token} --continue-sync
+```
+
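+The same flag is also wired into `nike_to_strava_sync.py` (see its diff below). Assuming that script's usual positional order (Nike token first, then the Strava credentials), an incremental Nike-to-Strava sync would look roughly like this; treat it as a sketch rather than a documented invocation:
+
+```bash
+python3(python) run_page/nike_to_strava_sync.py ${access_token} ${client_id} ${client_secret} ${strava_refresh_token} --continue-sync
+```
+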
### Nike Run Club
diff --git a/run_page/gpxtrackposter/track.py b/run_page/gpxtrackposter/track.py
index 8e7a9334dd3..90dfda33585 100644
--- a/run_page/gpxtrackposter/track.py
+++ b/run_page/gpxtrackposter/track.py
@@ -6,6 +6,7 @@
# license that can be found in the LICENSE file.
import datetime
+from datetime import timezone
import os
from collections import namedtuple
@@ -233,12 +234,13 @@ def _load_fit_data(self, fit: dict):
_polylines = []
self.polyline_container = []
message = fit["session_mesgs"][0]
- self.start_time = datetime.datetime.utcfromtimestamp(
- (message["start_time"] + FIT_EPOCH_S)
+ self.start_time = datetime.datetime.fromtimestamp(
+ (message["start_time"] + FIT_EPOCH_S), tz=timezone.utc
)
self.run_id = self.__make_run_id(self.start_time)
- self.end_time = datetime.datetime.utcfromtimestamp(
- (message["start_time"] + FIT_EPOCH_S + message["total_elapsed_time"])
+ self.end_time = datetime.datetime.fromtimestamp(
+ (message["start_time"] + FIT_EPOCH_S + message["total_elapsed_time"]),
+ tz=timezone.utc,
)
self.length = message["total_distance"]
self.average_heartrate = (
diff --git a/run_page/joyrun_sync.py b/run_page/joyrun_sync.py
index c5f03e6f621..e562c0ebd50 100755
--- a/run_page/joyrun_sync.py
+++ b/run_page/joyrun_sync.py
@@ -6,7 +6,7 @@
import sys
import time
from collections import namedtuple
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
from hashlib import md5
from urllib.parse import quote
@@ -209,7 +209,7 @@ def parse_points_to_gpx(
points_dict = {
"latitude": point[0],
"longitude": point[1],
- "time": datetime.utcfromtimestamp(current_time),
+ "time": datetime.fromtimestamp(current_time, tz=timezone.utc),
}
points_dict_list.append(points_dict)
@@ -224,7 +224,7 @@ def parse_points_to_gpx(
{
"latitude": run_points_data[-1][0],
"longitude": run_points_data[-1][1],
- "time": datetime.utcfromtimestamp(end_time),
+ "time": datetime.fromtimestamp(end_time, tz=timezone.utc),
}
)
segment_list.append(points_dict_list)
@@ -289,9 +289,9 @@ def parse_raw_data_to_nametuple(self, run_data, old_gpx_ids, with_gpx=False):
polyline_str = polyline.encode(run_points_data) if run_points_data else ""
start_latlng = start_point(*run_points_data[0]) if run_points_data else None
- start_date = datetime.utcfromtimestamp(start_time)
+ start_date = datetime.fromtimestamp(start_time, tz=timezone.utc)
start_date_local = adjust_time(start_date, BASE_TIMEZONE)
- end = datetime.utcfromtimestamp(end_time)
+ end = datetime.fromtimestamp(end_time, tz=timezone.utc)
# only for China now
end_local = adjust_time(end, BASE_TIMEZONE)
location_country = None
diff --git a/run_page/keep_sync.py b/run_page/keep_sync.py
index 491db5dc993..d49a8341981 100755
--- a/run_page/keep_sync.py
+++ b/run_page/keep_sync.py
@@ -5,7 +5,7 @@
import time
import zlib
from collections import namedtuple
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
import eviltransform
import gpxpy
@@ -66,7 +66,7 @@ def get_to_download_runs_ids(session, headers, sport_type):
logs = [j["stats"] for j in i["logs"]]
result.extend(k["id"] for k in logs if not k["isDoubtful"])
last_date = r.json()["data"]["lastTimestamp"]
- since_time = datetime.utcfromtimestamp(last_date / 1000)
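+ # lastTimestamp is epoch milliseconds, hence the division by 1000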
+ since_time = datetime.fromtimestamp(last_date / 1000, tz=timezone.utc)
print(f"pares keep ids data since {since_time}")
time.sleep(1) # spider rule
if not last_date:
@@ -146,10 +146,10 @@ def parse_raw_data_to_nametuple(
print(f"ID {keep_id} no gps data")
polyline_str = polyline.encode(run_points_data) if run_points_data else ""
start_latlng = start_point(*run_points_data[0]) if run_points_data else None
- start_date = datetime.utcfromtimestamp(start_time / 1000)
+ start_date = datetime.fromtimestamp(start_time / 1000, tz=timezone.utc)
tz_name = run_data.get("timezone", "")
start_date_local = adjust_time(start_date, tz_name)
- end = datetime.utcfromtimestamp(run_data["endTime"] / 1000)
+ end = datetime.fromtimestamp(run_data["endTime"] / 1000, tz=timezone.utc)
end_local = adjust_time(end, tz_name)
if not run_data["duration"]:
print(f"ID {keep_id} has no total time just ignore please check")
@@ -227,9 +227,10 @@ def parse_points_to_gpx(run_points_data, start_time, sport_type):
points_dict = {
"latitude": point["latitude"],
"longitude": point["longitude"],
- "time": datetime.utcfromtimestamp(
+ "time": datetime.fromtimestamp(
(point["timestamp"] * 100 + start_time)
- / 1000 # note that the timestamp of a point is in deciseconds (0.1 s)
+ / 1000, # note that the timestamp of a point is in deciseconds (0.1 s)
+ tz=timezone.utc,
),
"elevation": point.get("verticalAccuracy"),
"hr": point.get("hr"),
diff --git a/run_page/nike_sync.py b/run_page/nike_sync.py
index 3ca966a3edd..244b75186b4 100644
--- a/run_page/nike_sync.py
+++ b/run_page/nike_sync.py
@@ -5,7 +5,7 @@
import os.path
import time
from collections import namedtuple
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
from xml.etree import ElementTree
import gpxpy.gpx
@@ -24,7 +24,7 @@
# logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("nike_sync")
-BASE_URL = "https://api.nike.com/sport/v3/me"
+BASE_URL = "https://api.nike.com/plus/v3"
TOKEN_REFRESH_URL = "https://api.nike.com/idn/shim/oauth/2.0/token"
NIKE_CLIENT_ID = "VmhBZWFmRUdKNkc4ZTlEeFJVejhpRTUwQ1o5TWlKTUc="
NIKE_UX_ID = "Y29tLm5pa2Uuc3BvcnQucnVubmluZy5pb3MuNS4xNQ=="
@@ -36,67 +36,83 @@
class Nike:
- def __init__(self, refresh_token):
+ def __init__(self, access_token):
self.client = httpx.Client()
- # response = self.client.post(
- # TOKEN_REFRESH_URL,
- # headers=NIKE_HEADERS,
- # json={
- # "refresh_token": refresh_token,
- # "client_id": b64decode(NIKE_CLIENT_ID).decode(),
- # "grant_type": "refresh_token",
- # "ux_id": b64decode(NIKE_UX_ID).decode(),
- # },
- # timeout=60,
- # )
- # response.raise_for_status()
- #
- # access_token = response.json()["access_token"]
- access_token = "The content of 'access_token' that you just copied."
+ # HINT: if you still have an old NRC refresh_token, uncomment the lines below; that flow still works
+
+ # response = self.client.post(
+ # TOKEN_REFRESH_URL,
+ # headers=NIKE_HEADERS,
+ # json={
+ # "refresh_token": access_token, # its refresh_token for tesy here
+ # "client_id": b64decode(NIKE_CLIENT_ID).decode(),
+ # "grant_type": "refresh_token",
+ # "ux_id": b64decode(NIKE_UX_ID).decode(),
+ # },
+ # timeout=60,
+ # )
+ # response.raise_for_status()
+ # access_token = response.json()["access_token"]
+
self.client.headers.update({"Authorization": f"Bearer {access_token}"})
def get_activities_since_timestamp(self, timestamp):
- return self.request("activities/after_time", timestamp)
+ # the new endpoint pages by id rather than time, so timestamp is no longer used
+ return self.request(
+ "activities/before_id/v3/*?limit=30&types=run%2Cjogging&include_deleted=false"
+ )
- def get_activities_since_id(self, activity_id):
+ def get_activities_before_id(self, activity_id):
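+ # an activity_id of "*" asks for the newest page; results then page backwards via before_id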
+ if not activity_id:
+ activity_id = "*"
try:
- return self.request("activities/after_id", activity_id)
+ return self.request(
+ f"activities/before_id/v3/{activity_id}?limit=30&types=run%2Cjogging&include_deleted=false"
+ )
except:
print("retry")
time.sleep(3)
- return self.request("activities/after_id", activity_id)
+ return self.request(
+ f"activities/before_id/v3/{activity_id}?limit=30&types=run%2Cjogging&include_deleted=false"
+ )
def get_activity(self, activity_id):
try:
- return self.request("activity", f"{activity_id}?metrics=ALL")
+ return self.request(f"activity/{activity_id}?metrics=ALL")
except:
print("retry")
time.sleep(3)
- return self.request("activity", f"{activity_id}?metrics=ALL")
+ return self.request(f"activity/{activity_id}?metrics=ALL")
- def request(self, resource, selector):
- url = f"{BASE_URL}/{resource}/{selector}"
+ def request(self, resource):
+ url = f"{BASE_URL}/{resource}"
logger.info(f"GET: {url}")
response = self.client.get(url)
response.raise_for_status()
return response.json()
-def run(refresh_token):
+def run(refresh_token, is_continue_sync=False):
nike = Nike(refresh_token)
- last_id = get_last_id()
-
- logger.info(f"Running from ID {last_id}")
-
+ if is_continue_sync:
+ last_id_local = get_last_before_id()
+ print(f"Will continue sync before Running from ID {last_id_local}")
+ else:
+ last_id_local = None
+ before_id = None
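+ # page backwards from the newest activity via paging.before_id; with
+ # --continue-sync we stop as soon as the newest locally saved id shows up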
while True:
- if last_id is not None:
- data = nike.get_activities_since_id(last_id)
- else:
- data = nike.get_activities_since_timestamp(0)
-
- last_id = data["paging"].get("after_id")
+ data = nike.get_activities_before_id(before_id)
activities = data["activities"]
+ activities_ids = [i["id"] for i in activities]
+ is_sync_done = False
+ if last_id_local in activities_ids:
+ index = activities_ids.index(last_id_local)
+ activities = activities[:index]
+ is_sync_done = True
+
+ before_id = data["paging"].get("before_id")
logger.info(f"Found {len(activities)} new activities")
@@ -114,7 +130,7 @@ def run(refresh_token):
full_activity = nike.get_activity(activity_id)
save_activity(full_activity)
- if last_id is None or not activities:
+ if is_sync_done or before_id is None or not activities:
logger.info(f"Found no new activities, finishing")
return
@@ -127,13 +143,13 @@ def save_activity(activity):
path = os.path.join(OUTPUT_DIR, f"{activity_time}.json")
try:
with open(path, "w") as f:
- json.dump(sanitise_json(activity), f, indent=4)
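+ # store the raw payload as-is; the Gatsby-era key sanitising is no longer needed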
+ json.dump(activity, f, indent=4)
except Exception:
os.unlink(path)
raise
-def get_last_id():
+def get_last_before_id():
try:
file_names = os.listdir(OUTPUT_DIR)
file_names = [i for i in file_names if not i.startswith(".")]
@@ -148,26 +164,6 @@ def get_last_id():
return None
-def sanitise_json(d):
- """
- Gatsby's JSON loading for GraphQL queries doesn't support "." characters in
- names, which Nike uses a lot for reverse-domain notation.
-
- We recursively transform all dict keys to use underscores instead.
- """
-
- def _transform_key(key):
- return key.replace(".", "_")
-
- if isinstance(d, dict):
- return {_transform_key(k): sanitise_json(v) for k, v in d.items()}
-
- if isinstance(d, (tuple, list)):
- return [sanitise_json(x) for x in d]
-
- return d
-
-
def get_to_generate_files():
file_names = os.listdir(GPX_FOLDER)
try:
@@ -250,7 +246,9 @@ def update_points(points, update_data, update_name):
"latitude": lat["value"],
"longitude": lon["value"],
"start_time": lat["start_epoch_ms"],
- "time": datetime.utcfromtimestamp(lat["start_epoch_ms"] / 1000),
+ "time": datetime.fromtimestamp(
+ lat["start_epoch_ms"] / 1000, tz=timezone.utc
+ ),
}
)
@@ -353,9 +351,11 @@ def parse_no_gpx_data(activity):
elapsed_time = timedelta(seconds=int(activity["active_duration_ms"] / 1000))
nike_id = activity["end_epoch_ms"]
- start_date = datetime.utcfromtimestamp(activity["start_epoch_ms"] / 1000)
+ start_date = datetime.fromtimestamp(
+ activity["start_epoch_ms"] / 1000, tz=timezone.utc
+ )
start_date_local = adjust_time(start_date, BASE_TIMEZONE)
- end_date = datetime.utcfromtimestamp(activity["end_epoch_ms"] / 1000)
+ end_date = datetime.fromtimestamp(activity["end_epoch_ms"] / 1000, tz=timezone.utc)
end_date_local = adjust_time(end_date, BASE_TIMEZONE)
d = {
"id": int(nike_id),
@@ -419,8 +419,14 @@ def make_new_gpxs(files):
os.mkdir(OUTPUT_DIR)
parser = argparse.ArgumentParser()
parser.add_argument("refresh_token", help="API refresh access token for nike.com")
+ parser.add_argument(
+ "--continue-sync",
+ dest="continue_sync",
+ action="store_true",
+ help="Continue syncing from the last activity",
+ )
options = parser.parse_args()
- run(options.refresh_token)
+ run(options.refresh_token, options.continue_sync)
time.sleep(2)
files = get_to_generate_files()
diff --git a/run_page/nike_to_strava_sync.py b/run_page/nike_to_strava_sync.py
index 6ca95680d5f..6126ccc9119 100755
--- a/run_page/nike_to_strava_sync.py
+++ b/run_page/nike_to_strava_sync.py
@@ -29,8 +29,14 @@ def get_to_generate_files(last_time):
parser.add_argument("client_id", help="strava client id")
parser.add_argument("client_secret", help="strava client secret")
parser.add_argument("strava_refresh_token", help="strava refresh token")
+ parser.add_argument(
+ "--continue-sync",
+ dest="continue_sync",
+ action="store_true",
+ help="Continue syncing from the last activity",
+ )
options = parser.parse_args()
- run(options.nike_refresh_token)
+ run(options.nike_refresh_token, options.continue_sync)
time.sleep(2)
diff --git a/run_page/oppo_sync.py b/run_page/oppo_sync.py
index cf5bf803f6b..e810655ee46 100644
--- a/run_page/oppo_sync.py
+++ b/run_page/oppo_sync.py
@@ -5,7 +5,7 @@
import time
import xml.etree.ElementTree as ET
from collections import namedtuple
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
from xml.dom import minidom
import gpxpy
@@ -220,9 +220,9 @@ def parse_raw_data_to_name_tuple(sport_data, with_gpx, with_tcx):
]
polyline_str = polyline.encode(gps_data) if gps_data else ""
start_latlng = start_point(*gps_data[0]) if gps_data else None
- start_date = datetime.utcfromtimestamp(start_time / 1000)
+ start_date = datetime.fromtimestamp(start_time / 1000, tz=timezone.utc)
start_date_local = adjust_time(start_date, str(get_localzone()))
- end = datetime.utcfromtimestamp(sport_data["endTime"] / 1000)
+ end = datetime.fromtimestamp(sport_data["endTime"] / 1000, tz=timezone.utc)
end_local = adjust_time(end, str(get_localzone()))
location_country = None
if not other_data["totalTime"]:
@@ -413,7 +413,7 @@ def prepare_track_points(sport_data, with_gpx):
points_dict = {
"latitude": other_data.get("gpsPoint")[i]["latitude"],
"longitude": other_data.get("gpsPoint")[i]["longitude"],
- "time": datetime.utcfromtimestamp(temp_timestamp / 1000),
+ "time": datetime.fromtimestamp(temp_timestamp / 1000, tz=timezone.utc),
"hr": other_data.get("heartRate")[j]["value"],
}
points_dict_list.append(get_value(j, points_dict, other_data))
@@ -422,7 +422,7 @@ def prepare_track_points(sport_data, with_gpx):
for i in range(value_size):
temp_timestamp = other_data.get("heartRate")[i]["timestamp"]
- temp_date = datetime.utcfromtimestamp(temp_timestamp / 1000)
+ temp_date = datetime.fromtimestamp(temp_timestamp / 1000, tz=timezone.utc)
points_dict = {
"time": temp_date,
"hr": other_data.get("heartRate")[i]["value"],
@@ -450,7 +450,7 @@ def parse_points_to_tcx(sport_data, points_dict_list):
fit_id = str(sport_data["id"])
# local time
start_time = sport_data["startTime"]
- start_date = datetime.utcfromtimestamp(start_time / 1000)
+ start_date = datetime.fromtimestamp(start_time / 1000, tz=timezone.utc)
fit_start_time = datetime.strftime(
adjust_time(start_date, UTC_TIMEZONE), "%Y-%m-%dT%H:%M:%SZ"
)