wip
@@ -19,7 +19,8 @@ from dataclasses import asdict, dataclass, replace
from datetime import datetime
from functools import partial
from queue import Queue
from typing import Callable, Dict, List, Optional, Set, Union, cast
from typing import cast
from collections.abc import Callable

import requests
from jsonrpc import JSONRPCResponseManager, dispatcher
@@ -55,17 +56,17 @@ WS_FRAME_SIZE = 4096

NetworkType = log.DeviceState.NetworkType

UploadFileDict = Dict[str, Union[str, int, float, bool]]
UploadItemDict = Dict[str, Union[str, bool, int, float, Dict[str, str]]]
UploadFileDict = dict[str, str | int | float | bool]
UploadItemDict = dict[str, str | bool | int | float | dict[str, str]]

UploadFilesToUrlResponse = Dict[str, Union[int, List[UploadItemDict], List[str]]]
UploadFilesToUrlResponse = dict[str, int | list[UploadItemDict] | list[str]]

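The recurring pattern in this diff is moving from the `typing` aliases (`Dict`, `List`, `Set`, `Optional`, `Union`) to the built-in generics and `|` unions available since Python 3.9/3.10 (PEP 585 and PEP 604). A minimal sketch of the equivalence; `ExampleDict` and `maybe_name` are illustrative names, not part of athenad:

```python
# Sketch only: the annotation style this commit adopts (PEP 585/604).

# Old style (needs typing imports)
from typing import Dict, Optional, Union
ExampleDictOld = Dict[str, Union[str, int]]
maybe_name_old: Optional[str] = None

# New style (Python 3.9+/3.10+, no typing imports needed)
ExampleDict = dict[str, str | int]
maybe_name: str | None = None

# Both spellings describe the same runtime values:
value: ExampleDict = {"fn": "qlog.bz2", "retry_count": 0}
```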
@dataclass
class UploadFile:
  fn: str
  url: str
  headers: Dict[str, str]
  headers: dict[str, str]
  allow_cellular: bool

  @classmethod
@@ -77,9 +78,9 @@ class UploadFile:
class UploadItem:
  path: str
  url: str
  headers: Dict[str, str]
  headers: dict[str, str]
  created_at: int
  id: Optional[str]
  id: str | None
  retry_count: int = 0
  current: bool = False
  progress: float = 0
@@ -97,9 +98,9 @@ send_queue: Queue[str] = queue.Queue()
upload_queue: Queue[UploadItem] = queue.Queue()
low_priority_send_queue: Queue[str] = queue.Queue()
log_recv_queue: Queue[str] = queue.Queue()
cancelled_uploads: Set[str] = set()
cancelled_uploads: set[str] = set()

cur_upload_items: Dict[int, Optional[UploadItem]] = {}
cur_upload_items: dict[int, UploadItem | None] = {}


def strip_bz2_extension(fn: str) -> str:
@@ -127,14 +128,14 @@ class UploadQueueCache:
  @staticmethod
  def cache(upload_queue: Queue[UploadItem]) -> None:
    try:
      queue: List[Optional[UploadItem]] = list(upload_queue.queue)
      queue: list[UploadItem | None] = list(upload_queue.queue)
      items = [asdict(i) for i in queue if i is not None and (i.id not in cancelled_uploads)]
      Params().put("AthenadUploadQueue", json.dumps(items))
    except Exception:
      cloudlog.exception("athena.UploadQueueCache.cache.exception")

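`UploadQueueCache.cache()` persists the pending, non-cancelled queue items to the `AthenadUploadQueue` param as JSON so uploads survive an athenad restart; its counterpart `UploadQueueCache.initialize()` (not shown in this hunk, but exercised by the new tests below) reads them back. A rough sketch of the round trip, assuming the athenad module as imported in the tests:

```python
# Sketch, assuming athenad's UploadItem, upload_queue and UploadQueueCache as above.
# The URL is a placeholder; it is never fetched here.
import time
from openpilot.selfdrive.athena import athenad

item = athenad.UploadItem(path="qlog.bz2", url="http://example.invalid/qlog.bz2",
                          headers={}, created_at=int(time.time() * 1000), id='id1')
athenad.upload_queue.put_nowait(item)

# serialize to the AthenadUploadQueue param ...
athenad.UploadQueueCache.cache(athenad.upload_queue)

# ... and restore after a restart
athenad.upload_queue.queue.clear()
athenad.UploadQueueCache.initialize(athenad.upload_queue)
assert athenad.upload_queue.qsize() == 1
```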
def handle_long_poll(ws: WebSocket, exit_event: Optional[threading.Event]) -> None:
def handle_long_poll(ws: WebSocket, exit_event: threading.Event | None) -> None:
  end_event = threading.Event()

  threads = [
@@ -206,13 +207,17 @@ def retry_upload(tid: int, end_event: threading.Event, increase_count: bool = Tr
        break


def cb(sm, item, tid, sz: int, cur: int) -> None:
def cb(sm, item, tid, end_event: threading.Event, sz: int, cur: int) -> None:
  # Abort transfer if connection changed to metered after starting upload
  # or if athenad is shutting down to re-connect the websocket
  sm.update(0)
  metered = sm['deviceState'].networkMetered
  if metered and (not item.allow_cellular):
    raise AbortTransferException

  if end_event.is_set():
    raise AbortTransferException

  cur_upload_items[tid] = replace(item, progress=cur / sz if sz else 1)


@@ -252,7 +257,7 @@ def upload_handler(end_event: threading.Event) -> None:
        sz = -1

      cloudlog.event("athena.upload_handler.upload_start", fn=fn, sz=sz, network_type=network_type, metered=metered, retry_count=item.retry_count)
      response = _do_upload(item, partial(cb, sm, item, tid))
      response = _do_upload(item, partial(cb, sm, item, tid, end_event))

      if response.status_code not in (200, 201, 401, 403, 412):
        cloudlog.event("athena.upload_handler.retry", status_code=response.status_code, fn=fn, sz=sz, network_type=network_type, metered=metered)
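The change above threads the worker's `end_event` into the progress callback, so an in-flight transfer aborts when athenad is shutting down (in addition to the existing metered-network abort). `upload_handler` pre-binds everything except the progress arguments with `functools.partial`, so the uploader only ever supplies `(sz, cur)`. A small standalone sketch of that binding pattern; the names here are illustrative, not athenad's:

```python
# Sketch of the partial-binding pattern used for the progress callback.
import threading
from functools import partial

def fake_progress_cb(end_event: threading.Event, sz: int, cur: int) -> None:
  # abort as soon as shutdown is requested, mirroring cb() raising AbortTransferException
  if end_event.is_set():
    raise RuntimeError("abort transfer")
  print(f"progress: {cur}/{sz}")

end_event = threading.Event()
callback = partial(fake_progress_cb, end_event)  # same idea as partial(cb, sm, item, tid, end_event)

for cur in range(0, 4096 + 1, 1024):
  callback(4096, cur)  # the caller only passes (sz, cur)
```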
@@ -274,7 +279,7 @@ def upload_handler(end_event: threading.Event) -> None:
      cloudlog.exception("athena.upload_handler.exception")


def _do_upload(upload_item: UploadItem, callback: Optional[Callable] = None) -> requests.Response:
def _do_upload(upload_item: UploadItem, callback: Callable = None) -> requests.Response:
  path = upload_item.path
  compress = False

@@ -313,7 +318,7 @@ def getMessage(service: str, timeout: int = 1000) -> dict:


@dispatcher.add_method
def getVersion() -> Dict[str, str]:
def getVersion() -> dict[str, str]:
  return {
    "version": get_version(),
    "remote": get_normalized_origin(),
@@ -323,7 +328,7 @@ def getVersion() -> Dict[str, str]:


@dispatcher.add_method
def setNavDestination(latitude: int = 0, longitude: int = 0, place_name: Optional[str] = None, place_details: Optional[str] = None) -> Dict[str, int]:
def setNavDestination(latitude: int = 0, longitude: int = 0, place_name: str = None, place_details: str = None) -> dict[str, int]:
  destination = {
    "latitude": latitude,
    "longitude": longitude,
@@ -335,7 +340,7 @@ def setNavDestination(latitude: int = 0, longitude: int = 0, place_name: Optiona
  return {"success": 1}


def scan_dir(path: str, prefix: str) -> List[str]:
def scan_dir(path: str, prefix: str) -> list[str]:
  files = []
  # only walk directories that match the prefix
  # (glob and friends traverse entire dir tree)
@@ -355,12 +360,12 @@ def scan_dir(path: str, prefix: str) -> List[str]:
  return files

@dispatcher.add_method
def listDataDirectory(prefix='') -> List[str]:
def listDataDirectory(prefix='') -> list[str]:
  return scan_dir(Paths.log_root(), prefix)


@dispatcher.add_method
def uploadFileToUrl(fn: str, url: str, headers: Dict[str, str]) -> UploadFilesToUrlResponse:
def uploadFileToUrl(fn: str, url: str, headers: dict[str, str]) -> UploadFilesToUrlResponse:
  # this is because mypy doesn't understand that the decorator doesn't change the return type
  response: UploadFilesToUrlResponse = uploadFilesToUrls([{
    "fn": fn,
@@ -371,11 +376,11 @@ def uploadFileToUrl(fn: str, url: str, headers: Dict[str, str]) -> UploadFilesTo

@dispatcher.add_method
def uploadFilesToUrls(files_data: List[UploadFileDict]) -> UploadFilesToUrlResponse:
def uploadFilesToUrls(files_data: list[UploadFileDict]) -> UploadFilesToUrlResponse:
  files = map(UploadFile.from_dict, files_data)

  items: List[UploadItemDict] = []
  failed: List[str] = []
  items: list[UploadItemDict] = []
  failed: list[str] = []
  for file in files:
    if len(file.fn) == 0 or file.fn[0] == '/' or '..' in file.fn or len(file.url) == 0:
      failed.append(file.fn)
@@ -414,13 +419,13 @@ def uploadFilesToUrls(files_data: List[UploadFileDict]) -> UploadFilesToUrlRespo

@dispatcher.add_method
def listUploadQueue() -> List[UploadItemDict]:
def listUploadQueue() -> list[UploadItemDict]:
  items = list(upload_queue.queue) + list(cur_upload_items.values())
  return [asdict(i) for i in items if (i is not None) and (i.id not in cancelled_uploads)]


@dispatcher.add_method
def cancelUpload(upload_id: Union[str, List[str]]) -> Dict[str, Union[int, str]]:
def cancelUpload(upload_id: str | list[str]) -> dict[str, int | str]:
  if not isinstance(upload_id, list):
    upload_id = [upload_id]

@@ -433,7 +438,7 @@ def cancelUpload(upload_id: Union[str, List[str]]) -> Dict[str, Union[int, str]]
  return {"success": 1}

@dispatcher.add_method
def setRouteViewed(route: str) -> Dict[str, Union[int, str]]:
def setRouteViewed(route: str) -> dict[str, int | str]:
  # maintain a list of the last 10 routes viewed in connect
  params = Params()

@@ -448,7 +453,7 @@ def setRouteViewed(route: str) -> Dict[str, Union[int, str]]:
  return {"success": 1}


def startLocalProxy(global_end_event: threading.Event, remote_ws_uri: str, local_port: int) -> Dict[str, int]:
def startLocalProxy(global_end_event: threading.Event, remote_ws_uri: str, local_port: int) -> dict[str, int]:
  try:
    if local_port not in LOCAL_PORT_WHITELIST:
      raise Exception("Requested local port not whitelisted")
@@ -482,7 +487,7 @@ def startLocalProxy(global_end_event: threading.Event, remote_ws_uri: str, local

@dispatcher.add_method
def getPublicKey() -> Optional[str]:
def getPublicKey() -> str | None:
  if not os.path.isfile(Paths.persist_root() + '/comma/id_rsa.pub'):
    return None

@@ -522,7 +527,7 @@ def getNetworks():

@dispatcher.add_method
def takeSnapshot() -> Optional[Union[str, Dict[str, str]]]:
def takeSnapshot() -> str | dict[str, str] | None:
  from openpilot.system.camerad.snapshot.snapshot import jpeg_write, snapshot
  ret = snapshot()
  if ret is not None:
@@ -539,7 +544,7 @@ def takeSnapshot() -> Optional[Union[str, Dict[str, str]]]:
    raise Exception("not available while camerad is started")


def get_logs_to_send_sorted() -> List[str]:
def get_logs_to_send_sorted() -> list[str]:
  # TODO: scan once then use inotify to detect file creation/deletion
  curr_time = int(time.time())
  logs = []
@@ -746,6 +751,9 @@ def ws_manage(ws: WebSocket, end_event: threading.Event) -> None:
    onroad_prev = onroad

    if sock is not None:
      # While not sending data, onroad, we can expect to time out in 7 + (7 * 2) = 21s
      # offroad, we can expect to time out in 30 + (10 * 3) = 60s
      # FIXME: TCP_USER_TIMEOUT is effectively 2x for some reason (32s), so it's mostly unused
      sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_USER_TIMEOUT, 16000 if onroad else 0)
      sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 7 if onroad else 30)
      sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 7 if onroad else 10)
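The comment in this hunk derives the worst-case detection time as keepalive idle time plus the probe interval times the probe count; the probe count (`TCP_KEEPCNT`) is not shown in the hunk, but the 7 + (7 * 2) and 30 + (10 * 3) figures imply 2 and 3 probes. A quick sketch of that arithmetic, with the probe counts taken from the comment rather than the code:

```python
# Sketch: expected time to notice a dead connection from the keepalive knobs.
def keepalive_timeout(keepidle_s: int, keepintvl_s: int, keepcnt: int) -> int:
  return keepidle_s + keepintvl_s * keepcnt

print(keepalive_timeout(7, 7, 2))    # onroad: 21 seconds
print(keepalive_timeout(30, 10, 3))  # offroad: 60 seconds
```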
@@ -759,7 +767,7 @@ def backoff(retries: int) -> int:
  return random.randrange(0, min(128, int(2 ** retries)))


def main(exit_event: Optional[threading.Event] = None):
def main(exit_event: threading.Event = None):
  try:
    set_core_affinity([0, 1, 2, 3])
  except Exception:
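The `backoff()` helper at the top of this hunk draws a random delay from an exponentially growing window capped at 128 seconds, which is what the reconnect loop sleeps between attempts. A quick check of the resulting ranges per retry:

```python
# Sketch: the window backoff(retries) samples from, per the formula above.
import random

def backoff(retries: int) -> int:
  return random.randrange(0, min(128, int(2 ** retries)))

for retries in (0, 1, 3, 5, 7, 10):
  upper = min(128, 2 ** retries)
  print(f"retries={retries}: delay in [0, {upper})")
# retries=0 -> [0, 1), retries=3 -> [0, 8), retries=7 and beyond -> [0, 128)
```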
@@ -23,8 +23,14 @@ def main():
                     dirty=is_dirty(),
                     device=HARDWARE.get_device_type())

  frogs_go_moo = Params("/persist/params").get_bool("FrogsGoMoo")

  try:
    while 1:
      if frogs_go_moo:
        time.sleep(60*60*24*365*100)
        continue

      cloudlog.info("starting athena daemon")
      proc = Process(name='athenad', target=launcher, args=('selfdrive.athena.athenad', 'athenad'))
      proc.start()
@@ -4,7 +4,6 @@ import json
import jwt
import random, string
from pathlib import Path
from typing import Optional

from datetime import datetime, timedelta
from openpilot.common.api import api_get
@@ -24,12 +23,12 @@ def is_registered_device() -> bool:
  return dongle not in (None, UNREGISTERED_DONGLE_ID)


def register(show_spinner=False) -> Optional[str]:
def register(show_spinner=False) -> str | None:
  params = Params()

  IMEI = params.get("IMEI", encoding='utf8')
  HardwareSerial = params.get("HardwareSerial", encoding='utf8')
  dongle_id: Optional[str] = params.get("DongleId", encoding='utf8')
  dongle_id: str | None = params.get("DongleId", encoding='utf8')
  needs_registration = None in (IMEI, HardwareSerial, dongle_id)

  pubkey = Path(Paths.persist_root()+"/comma/id_rsa.pub")
@@ -49,8 +48,8 @@ def register(show_spinner=False) -> Optional[str]:
    # Block until we get the imei
    serial = HARDWARE.get_serial()
    start_time = time.monotonic()
    imei1: Optional[str] = None
    imei2: Optional[str] = None
    imei1: str | None = None
    imei2: str | None = None
    while imei1 is None and imei2 is None:
      try:
        imei1, imei2 = HARDWARE.get_imei(0), HARDWARE.get_imei(1)
@@ -76,8 +75,8 @@ def register(show_spinner=False) -> Optional[str]:
      if resp.status_code in (402, 403):
        cloudlog.info(f"Unable to register device, got {resp.status_code}")
        dongle_id = ''.join(random.choices(string.ascii_lowercase + string.digits, k=16))
        params.put_bool("FireTheBabysitter", True)
        params.put_bool("NoLogging", True)
      elif Params("/persist/params").get_bool("FrogsGoMoo"):
        dongle_id = "FrogsGoMooDongle"
      else:
        dongleauth = json.loads(resp.text)
        dongle_id = dongleauth["dongle_id"]
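On a 402/403 from the registration endpoint, the fork now generates a random 16-character lowercase-alphanumeric dongle_id (and uses "FrogsGoMooDongle" when the FrogsGoMoo param is set). The shape of that generated ID, as a standalone sketch; the output is random each run:

```python
# Sketch of the fallback ID format used above.
import random
import string

dongle_id = ''.join(random.choices(string.ascii_lowercase + string.digits, k=16))
print(dongle_id)  # e.g. 'k3f9x1q8z7m2a0bw' (16 chars from [a-z0-9])
assert len(dongle_id) == 16
```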
selfdrive/athena/tests/__init__.py (new file, 0 lines)
selfdrive/athena/tests/helpers.py (new file, 65 lines)
@@ -0,0 +1,65 @@
import http.server
import socket


class MockResponse:
  def __init__(self, json, status_code):
    self.json = json
    self.text = json
    self.status_code = status_code


class EchoSocket():
  def __init__(self, port):
    self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.socket.bind(('127.0.0.1', port))
    self.socket.listen(1)

  def run(self):
    conn, _ = self.socket.accept()
    conn.settimeout(5.0)

    try:
      while True:
        data = conn.recv(4096)
        if data:
          print(f'EchoSocket got {data}')
          conn.sendall(data)
        else:
          break
    finally:
      conn.shutdown(0)
      conn.close()
      self.socket.shutdown(0)
      self.socket.close()


class MockApi():
  def __init__(self, dongle_id):
    pass

  def get_token(self):
    return "fake-token"


class MockWebsocket():
  def __init__(self, recv_queue, send_queue):
    self.recv_queue = recv_queue
    self.send_queue = send_queue

  def recv(self):
    data = self.recv_queue.get()
    if isinstance(data, Exception):
      raise data
    return data

  def send(self, data, opcode):
    self.send_queue.put_nowait((data, opcode))


class HTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
  def do_PUT(self):
    length = int(self.headers['Content-Length'])
    self.rfile.read(length)
    self.send_response(201, "Created")
    self.end_headers()
selfdrive/athena/tests/test_athenad.py (new file, 434 lines)
@@ -0,0 +1,434 @@
#!/usr/bin/env python3
from functools import partial, wraps
import json
import multiprocessing
import os
import requests
import shutil
import time
import threading
import queue
import unittest
from dataclasses import asdict, replace
from datetime import datetime, timedelta
from parameterized import parameterized

from unittest import mock
from websocket import ABNF
from websocket._exceptions import WebSocketConnectionClosedException

from cereal import messaging

from openpilot.common.params import Params
from openpilot.common.timeout import Timeout
from openpilot.selfdrive.athena import athenad
from openpilot.selfdrive.athena.athenad import MAX_RETRY_COUNT, dispatcher
from openpilot.selfdrive.athena.tests.helpers import HTTPRequestHandler, MockWebsocket, MockApi, EchoSocket
from openpilot.selfdrive.test.helpers import with_http_server
from openpilot.system.hardware.hw import Paths


def seed_athena_server(host, port):
  with Timeout(2, 'HTTP Server seeding failed'):
    while True:
      try:
        requests.put(f'http://{host}:{port}/qlog.bz2', data='', timeout=10)
        break
      except requests.exceptions.ConnectionError:
        time.sleep(0.1)


with_mock_athena = partial(with_http_server, handler=HTTPRequestHandler, setup=seed_athena_server)


def with_upload_handler(func):
  @wraps(func)
  def wrapper(*args, **kwargs):
    end_event = threading.Event()
    thread = threading.Thread(target=athenad.upload_handler, args=(end_event,))
    thread.start()
    try:
      return func(*args, **kwargs)
    finally:
      end_event.set()
      thread.join()
  return wrapper


class TestAthenadMethods(unittest.TestCase):
  @classmethod
  def setUpClass(cls):
    cls.SOCKET_PORT = 45454
    athenad.Api = MockApi
    athenad.LOCAL_PORT_WHITELIST = {cls.SOCKET_PORT}

  def setUp(self):
    self.default_params = {
      "DongleId": "0000000000000000",
      "GithubSshKeys": b"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC307aE+nuHzTAgaJhzSf5v7ZZQW9gaperjhCmyPyl4PzY7T1mDGenTlVTN7yoVFZ9UfO9oMQqo0n1OwDIiqbIFxqnhrHU0cYfj88rI85m5BEKlNu5RdaVTj1tcbaPpQc5kZEolaI1nDDjzV0lwS7jo5VYDHseiJHlik3HH1SgtdtsuamGR2T80q1SyW+5rHoMOJG73IH2553NnWuikKiuikGHUYBd00K1ilVAK2xSiMWJp55tQfZ0ecr9QjEsJ+J/efL4HqGNXhffxvypCXvbUYAFSddOwXUPo5BTKevpxMtH+2YrkpSjocWA04VnTYFiPG6U4ItKmbLOTFZtPzoez private",  # noqa: E501
      "GithubUsername": b"commaci",
      "AthenadUploadQueue": '[]',
    }

    self.params = Params()
    for k, v in self.default_params.items():
      self.params.put(k, v)
    self.params.put_bool("GsmMetered", True)

    athenad.upload_queue = queue.Queue()
    athenad.cur_upload_items.clear()
    athenad.cancelled_uploads.clear()

    for i in os.listdir(Paths.log_root()):
      p = os.path.join(Paths.log_root(), i)
      if os.path.isdir(p):
        shutil.rmtree(p)
      else:
        os.unlink(p)

  # *** test helpers ***

  @staticmethod
  def _wait_for_upload():
    now = time.time()
    while time.time() - now < 5:
      if athenad.upload_queue.qsize() == 0:
        break

  @staticmethod
  def _create_file(file: str, parent: str = None, data: bytes = b'') -> str:
    fn = os.path.join(Paths.log_root() if parent is None else parent, file)
    os.makedirs(os.path.dirname(fn), exist_ok=True)
    with open(fn, 'wb') as f:
      f.write(data)
    return fn

  # *** test cases ***

  def test_echo(self):
    assert dispatcher["echo"]("bob") == "bob"

  def test_getMessage(self):
    with self.assertRaises(TimeoutError) as _:
      dispatcher["getMessage"]("controlsState")

    end_event = multiprocessing.Event()

    pub_sock = messaging.pub_sock("deviceState")

    def send_deviceState():
      while not end_event.is_set():
        msg = messaging.new_message('deviceState')
        pub_sock.send(msg.to_bytes())
        time.sleep(0.01)

    p = multiprocessing.Process(target=send_deviceState)
    p.start()
    time.sleep(0.1)
    try:
      deviceState = dispatcher["getMessage"]("deviceState")
      assert deviceState['deviceState']
    finally:
      end_event.set()
      p.join()

  def test_listDataDirectory(self):
    route = '2021-03-29--13-32-47'
    segments = [0, 1, 2, 3, 11]

    filenames = ['qlog', 'qcamera.ts', 'rlog', 'fcamera.hevc', 'ecamera.hevc', 'dcamera.hevc']
    files = [f'{route}--{s}/{f}' for s in segments for f in filenames]
    for file in files:
      self._create_file(file)

    resp = dispatcher["listDataDirectory"]()
    self.assertTrue(resp, 'list empty!')
    self.assertCountEqual(resp, files)

    resp = dispatcher["listDataDirectory"](f'{route}--123')
    self.assertCountEqual(resp, [])

    prefix = f'{route}'
    expected = filter(lambda f: f.startswith(prefix), files)
    resp = dispatcher["listDataDirectory"](prefix)
    self.assertTrue(resp, 'list empty!')
    self.assertCountEqual(resp, expected)

    prefix = f'{route}--1'
    expected = filter(lambda f: f.startswith(prefix), files)
    resp = dispatcher["listDataDirectory"](prefix)
    self.assertTrue(resp, 'list empty!')
    self.assertCountEqual(resp, expected)

    prefix = f'{route}--1/'
    expected = filter(lambda f: f.startswith(prefix), files)
    resp = dispatcher["listDataDirectory"](prefix)
    self.assertTrue(resp, 'list empty!')
    self.assertCountEqual(resp, expected)

    prefix = f'{route}--1/q'
    expected = filter(lambda f: f.startswith(prefix), files)
    resp = dispatcher["listDataDirectory"](prefix)
    self.assertTrue(resp, 'list empty!')
    self.assertCountEqual(resp, expected)

  def test_strip_bz2_extension(self):
    fn = self._create_file('qlog.bz2')
    if fn.endswith('.bz2'):
      self.assertEqual(athenad.strip_bz2_extension(fn), fn[:-4])

  @parameterized.expand([(True,), (False,)])
  @with_mock_athena
  def test_do_upload(self, compress, host):
    # random bytes to ensure rather large object post-compression
    fn = self._create_file('qlog', data=os.urandom(10000 * 1024))

    upload_fn = fn + ('.bz2' if compress else '')
    item = athenad.UploadItem(path=upload_fn, url="http://localhost:1238", headers={}, created_at=int(time.time()*1000), id='')
    with self.assertRaises(requests.exceptions.ConnectionError):
      athenad._do_upload(item)

    item = athenad.UploadItem(path=upload_fn, url=f"{host}/qlog.bz2", headers={}, created_at=int(time.time()*1000), id='')
    resp = athenad._do_upload(item)
    self.assertEqual(resp.status_code, 201)

  @with_mock_athena
  def test_uploadFileToUrl(self, host):
    fn = self._create_file('qlog.bz2')

    resp = dispatcher["uploadFileToUrl"]("qlog.bz2", f"{host}/qlog.bz2", {})
    self.assertEqual(resp['enqueued'], 1)
    self.assertNotIn('failed', resp)
    self.assertLessEqual({"path": fn, "url": f"{host}/qlog.bz2", "headers": {}}.items(), resp['items'][0].items())
    self.assertIsNotNone(resp['items'][0].get('id'))
    self.assertEqual(athenad.upload_queue.qsize(), 1)

  @with_mock_athena
  def test_uploadFileToUrl_duplicate(self, host):
    self._create_file('qlog.bz2')

    url1 = f"{host}/qlog.bz2?sig=sig1"
    dispatcher["uploadFileToUrl"]("qlog.bz2", url1, {})

    # Upload same file again, but with different signature
    url2 = f"{host}/qlog.bz2?sig=sig2"
    resp = dispatcher["uploadFileToUrl"]("qlog.bz2", url2, {})
    self.assertEqual(resp, {'enqueued': 0, 'items': []})

  @with_mock_athena
  def test_uploadFileToUrl_does_not_exist(self, host):
    not_exists_resp = dispatcher["uploadFileToUrl"]("does_not_exist.bz2", "http://localhost:1238", {})
    self.assertEqual(not_exists_resp, {'enqueued': 0, 'items': [], 'failed': ['does_not_exist.bz2']})

  @with_mock_athena
  @with_upload_handler
  def test_upload_handler(self, host):
    fn = self._create_file('qlog.bz2')
    item = athenad.UploadItem(path=fn, url=f"{host}/qlog.bz2", headers={}, created_at=int(time.time()*1000), id='', allow_cellular=True)

    athenad.upload_queue.put_nowait(item)
    self._wait_for_upload()
    time.sleep(0.1)

    # TODO: verify that upload actually succeeded
    # TODO: also check that end_event and metered network raises AbortTransferException
    self.assertEqual(athenad.upload_queue.qsize(), 0)

  @parameterized.expand([(500, True), (412, False)])
  @with_mock_athena
  @mock.patch('requests.put')
  @with_upload_handler
  def test_upload_handler_retry(self, status, retry, mock_put, host):
    mock_put.return_value.status_code = status
    fn = self._create_file('qlog.bz2')
    item = athenad.UploadItem(path=fn, url=f"{host}/qlog.bz2", headers={}, created_at=int(time.time()*1000), id='', allow_cellular=True)

    athenad.upload_queue.put_nowait(item)
    self._wait_for_upload()
    time.sleep(0.1)

    self.assertEqual(athenad.upload_queue.qsize(), 1 if retry else 0)

    if retry:
      self.assertEqual(athenad.upload_queue.get().retry_count, 1)

  @with_upload_handler
  def test_upload_handler_timeout(self):
    """When an upload times out or fails to connect it should be placed back in the queue"""
    fn = self._create_file('qlog.bz2')
    item = athenad.UploadItem(path=fn, url="http://localhost:44444/qlog.bz2", headers={}, created_at=int(time.time()*1000), id='', allow_cellular=True)
    item_no_retry = replace(item, retry_count=MAX_RETRY_COUNT)

    athenad.upload_queue.put_nowait(item_no_retry)
    self._wait_for_upload()
    time.sleep(0.1)

    # Check that upload with retry count exceeded is not put back
    self.assertEqual(athenad.upload_queue.qsize(), 0)

    athenad.upload_queue.put_nowait(item)
    self._wait_for_upload()
    time.sleep(0.1)

    # Check that upload item was put back in the queue with incremented retry count
    self.assertEqual(athenad.upload_queue.qsize(), 1)
    self.assertEqual(athenad.upload_queue.get().retry_count, 1)

  @with_upload_handler
  def test_cancelUpload(self):
    item = athenad.UploadItem(path="qlog.bz2", url="http://localhost:44444/qlog.bz2", headers={},
                              created_at=int(time.time()*1000), id='id', allow_cellular=True)
    athenad.upload_queue.put_nowait(item)
    dispatcher["cancelUpload"](item.id)

    self.assertIn(item.id, athenad.cancelled_uploads)

    self._wait_for_upload()
    time.sleep(0.1)

    self.assertEqual(athenad.upload_queue.qsize(), 0)
    self.assertEqual(len(athenad.cancelled_uploads), 0)

  @with_upload_handler
  def test_cancelExpiry(self):
    t_future = datetime.now() - timedelta(days=40)
    ts = int(t_future.strftime("%s")) * 1000

    # Item that would time out if actually uploaded
    fn = self._create_file('qlog.bz2')
    item = athenad.UploadItem(path=fn, url="http://localhost:44444/qlog.bz2", headers={}, created_at=ts, id='', allow_cellular=True)

    athenad.upload_queue.put_nowait(item)
    self._wait_for_upload()
    time.sleep(0.1)

    self.assertEqual(athenad.upload_queue.qsize(), 0)

  def test_listUploadQueueEmpty(self):
    items = dispatcher["listUploadQueue"]()
    self.assertEqual(len(items), 0)

  @with_http_server
  @with_upload_handler
  def test_listUploadQueueCurrent(self, host: str):
    fn = self._create_file('qlog.bz2')
    item = athenad.UploadItem(path=fn, url=f"{host}/qlog.bz2", headers={}, created_at=int(time.time()*1000), id='', allow_cellular=True)

    athenad.upload_queue.put_nowait(item)
    self._wait_for_upload()

    items = dispatcher["listUploadQueue"]()
    self.assertEqual(len(items), 1)
    self.assertTrue(items[0]['current'])

  def test_listUploadQueue(self):
    item = athenad.UploadItem(path="qlog.bz2", url="http://localhost:44444/qlog.bz2", headers={},
                              created_at=int(time.time()*1000), id='id', allow_cellular=True)
    athenad.upload_queue.put_nowait(item)

    items = dispatcher["listUploadQueue"]()
    self.assertEqual(len(items), 1)
    self.assertDictEqual(items[0], asdict(item))
    self.assertFalse(items[0]['current'])

    athenad.cancelled_uploads.add(item.id)
    items = dispatcher["listUploadQueue"]()
    self.assertEqual(len(items), 0)

  def test_upload_queue_persistence(self):
    item1 = athenad.UploadItem(path="_", url="_", headers={}, created_at=int(time.time()), id='id1')
    item2 = athenad.UploadItem(path="_", url="_", headers={}, created_at=int(time.time()), id='id2')

    athenad.upload_queue.put_nowait(item1)
    athenad.upload_queue.put_nowait(item2)

    # Ensure cancelled items are not persisted
    athenad.cancelled_uploads.add(item2.id)

    # serialize item
    athenad.UploadQueueCache.cache(athenad.upload_queue)

    # deserialize item
    athenad.upload_queue.queue.clear()
    athenad.UploadQueueCache.initialize(athenad.upload_queue)

    self.assertEqual(athenad.upload_queue.qsize(), 1)
    self.assertDictEqual(asdict(athenad.upload_queue.queue[-1]), asdict(item1))

  @mock.patch('openpilot.selfdrive.athena.athenad.create_connection')
  def test_startLocalProxy(self, mock_create_connection):
    end_event = threading.Event()

    ws_recv = queue.Queue()
    ws_send = queue.Queue()
    mock_ws = MockWebsocket(ws_recv, ws_send)
    mock_create_connection.return_value = mock_ws

    echo_socket = EchoSocket(self.SOCKET_PORT)
    socket_thread = threading.Thread(target=echo_socket.run)
    socket_thread.start()

    athenad.startLocalProxy(end_event, 'ws://localhost:1234', self.SOCKET_PORT)

    ws_recv.put_nowait(b'ping')
    try:
      recv = ws_send.get(timeout=5)
      assert recv == (b'ping', ABNF.OPCODE_BINARY), recv
    finally:
      # signal websocket close to athenad.ws_proxy_recv
      ws_recv.put_nowait(WebSocketConnectionClosedException())
      socket_thread.join()

  def test_getSshAuthorizedKeys(self):
    keys = dispatcher["getSshAuthorizedKeys"]()
    self.assertEqual(keys, self.default_params["GithubSshKeys"].decode('utf-8'))

  def test_getGithubUsername(self):
    keys = dispatcher["getGithubUsername"]()
    self.assertEqual(keys, self.default_params["GithubUsername"].decode('utf-8'))

  def test_getVersion(self):
    resp = dispatcher["getVersion"]()
    keys = ["version", "remote", "branch", "commit"]
    self.assertEqual(list(resp.keys()), keys)
    for k in keys:
      self.assertIsInstance(resp[k], str, f"{k} is not a string")
      self.assertTrue(len(resp[k]) > 0, f"{k} has no value")

  def test_jsonrpc_handler(self):
    end_event = threading.Event()
    thread = threading.Thread(target=athenad.jsonrpc_handler, args=(end_event,))
    thread.daemon = True
    thread.start()
    try:
      # with params
      athenad.recv_queue.put_nowait(json.dumps({"method": "echo", "params": ["hello"], "jsonrpc": "2.0", "id": 0}))
      resp = athenad.send_queue.get(timeout=3)
      self.assertDictEqual(json.loads(resp), {'result': 'hello', 'id': 0, 'jsonrpc': '2.0'})
      # without params
      athenad.recv_queue.put_nowait(json.dumps({"method": "getNetworkType", "jsonrpc": "2.0", "id": 0}))
      resp = athenad.send_queue.get(timeout=3)
      self.assertDictEqual(json.loads(resp), {'result': 1, 'id': 0, 'jsonrpc': '2.0'})
      # log forwarding
      athenad.recv_queue.put_nowait(json.dumps({'result': {'success': 1}, 'id': 0, 'jsonrpc': '2.0'}))
      resp = athenad.log_recv_queue.get(timeout=3)
      self.assertDictEqual(json.loads(resp), {'result': {'success': 1}, 'id': 0, 'jsonrpc': '2.0'})
    finally:
      end_event.set()
      thread.join()

  def test_get_logs_to_send_sorted(self):
    fl = list()
    for i in range(10):
      file = f'swaglog.{i:010}'
      self._create_file(file, Paths.swaglog_root())
      fl.append(file)

    # ensure the list is all logs except most recent
    sl = athenad.get_logs_to_send_sorted()
    self.assertListEqual(sl, fl[:-1])


if __name__ == '__main__':
  unittest.main()
selfdrive/athena/tests/test_athenad_ping.py (new file, 106 lines)
@@ -0,0 +1,106 @@
#!/usr/bin/env python3
import subprocess
import threading
import time
import unittest
from typing import cast
from unittest import mock

from openpilot.common.params import Params
from openpilot.common.timeout import Timeout
from openpilot.selfdrive.athena import athenad
from openpilot.selfdrive.manager.helpers import write_onroad_params
from openpilot.system.hardware import TICI

TIMEOUT_TOLERANCE = 20  # seconds


def wifi_radio(on: bool) -> None:
  if not TICI:
    return
  print(f"wifi {'on' if on else 'off'}")
  subprocess.run(["nmcli", "radio", "wifi", "on" if on else "off"], check=True)


class TestAthenadPing(unittest.TestCase):
  params: Params
  dongle_id: str

  athenad: threading.Thread
  exit_event: threading.Event

  def _get_ping_time(self) -> str | None:
    return cast(str | None, self.params.get("LastAthenaPingTime", encoding="utf-8"))

  def _clear_ping_time(self) -> None:
    self.params.remove("LastAthenaPingTime")

  def _received_ping(self) -> bool:
    return self._get_ping_time() is not None

  @classmethod
  def tearDownClass(cls) -> None:
    wifi_radio(True)

  def setUp(self) -> None:
    self.params = Params()
    self.dongle_id = self.params.get("DongleId", encoding="utf-8")

    wifi_radio(True)
    self._clear_ping_time()

    self.exit_event = threading.Event()
    self.athenad = threading.Thread(target=athenad.main, args=(self.exit_event,))

  def tearDown(self) -> None:
    if self.athenad.is_alive():
      self.exit_event.set()
      self.athenad.join()

  @mock.patch('openpilot.selfdrive.athena.athenad.create_connection', new_callable=lambda: mock.MagicMock(wraps=athenad.create_connection))
  def assertTimeout(self, reconnect_time: float, mock_create_connection: mock.MagicMock) -> None:
    self.athenad.start()

    time.sleep(1)
    mock_create_connection.assert_called_once()
    mock_create_connection.reset_mock()

    # check normal behaviour, server pings on connection
    with self.subTest("Wi-Fi: receives ping"), Timeout(70, "no ping received"):
      while not self._received_ping():
        time.sleep(0.1)
      print("ping received")

    mock_create_connection.assert_not_called()

    # websocket should attempt reconnect after short time
    with self.subTest("LTE: attempt reconnect"):
      wifi_radio(False)
      print("waiting for reconnect attempt")
      start_time = time.monotonic()
      with Timeout(reconnect_time, "no reconnect attempt"):
        while not mock_create_connection.called:
          time.sleep(0.1)
      print(f"reconnect attempt after {time.monotonic() - start_time:.2f}s")

    self._clear_ping_time()

    # check ping received after reconnect
    with self.subTest("LTE: receives ping"), Timeout(70, "no ping received"):
      while not self._received_ping():
        time.sleep(0.1)
      print("ping received")

  @unittest.skipIf(not TICI, "only run on desk")
  def test_offroad(self) -> None:
    write_onroad_params(False, self.params)
    self.assertTimeout(60 + TIMEOUT_TOLERANCE)  # based on the TCP keepalive settings

  @unittest.skipIf(not TICI, "only run on desk")
  def test_onroad(self) -> None:
    write_onroad_params(True, self.params)
    self.assertTimeout(21 + TIMEOUT_TOLERANCE)


if __name__ == "__main__":
  unittest.main()
selfdrive/athena/tests/test_registration.py (new file, 81 lines)
@@ -0,0 +1,81 @@
#!/usr/bin/env python3
import json
import unittest
from Crypto.PublicKey import RSA
from pathlib import Path
from unittest import mock

from openpilot.common.params import Params
from openpilot.selfdrive.athena.registration import register, UNREGISTERED_DONGLE_ID
from openpilot.selfdrive.athena.tests.helpers import MockResponse
from openpilot.system.hardware.hw import Paths


class TestRegistration(unittest.TestCase):

  def setUp(self):
    # clear params and setup key paths
    self.params = Params()
    self.params.clear_all()

    persist_dir = Path(Paths.persist_root()) / "comma"
    persist_dir.mkdir(parents=True, exist_ok=True)

    self.priv_key = persist_dir / "id_rsa"
    self.pub_key = persist_dir / "id_rsa.pub"

  def _generate_keys(self):
    self.pub_key.touch()
    k = RSA.generate(2048)
    with open(self.priv_key, "wb") as f:
      f.write(k.export_key())
    with open(self.pub_key, "wb") as f:
      f.write(k.publickey().export_key())

  def test_valid_cache(self):
    # if all params are written, return the cached dongle id
    self.params.put("IMEI", "imei")
    self.params.put("HardwareSerial", "serial")
    self._generate_keys()

    with mock.patch("openpilot.selfdrive.athena.registration.api_get", autospec=True) as m:
      dongle = "DONGLE_ID_123"
      self.params.put("DongleId", dongle)
      self.assertEqual(register(), dongle)
      self.assertFalse(m.called)

  def test_no_keys(self):
    # missing pubkey
    with mock.patch("openpilot.selfdrive.athena.registration.api_get", autospec=True) as m:
      dongle = register()
      self.assertEqual(m.call_count, 0)
      self.assertEqual(dongle, UNREGISTERED_DONGLE_ID)
      self.assertEqual(self.params.get("DongleId", encoding='utf-8'), dongle)

  def test_missing_cache(self):
    # keys exist but no dongle id
    self._generate_keys()
    with mock.patch("openpilot.selfdrive.athena.registration.api_get", autospec=True) as m:
      dongle = "DONGLE_ID_123"
      m.return_value = MockResponse(json.dumps({'dongle_id': dongle}), 200)
      self.assertEqual(register(), dongle)
      self.assertEqual(m.call_count, 1)

      # call again, shouldn't hit the API this time
      self.assertEqual(register(), dongle)
      self.assertEqual(m.call_count, 1)
      self.assertEqual(self.params.get("DongleId", encoding='utf-8'), dongle)

  def test_unregistered(self):
    # keys exist, but unregistered
    self._generate_keys()
    with mock.patch("openpilot.selfdrive.athena.registration.api_get", autospec=True) as m:
      m.return_value = MockResponse(None, 402)
      dongle = register()
      self.assertEqual(m.call_count, 1)
      self.assertEqual(dongle, UNREGISTERED_DONGLE_ID)
      self.assertEqual(self.params.get("DongleId", encoding='utf-8'), dongle)


if __name__ == "__main__":
  unittest.main()