Add openpilot tests
This commit is contained in:
2
selfdrive/test/process_replay/.gitignore
vendored
Normal file
2
selfdrive/test/process_replay/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
fakedata/
|
||||
debayer_diff.txt
|
||||
126
selfdrive/test/process_replay/README.md
Normal file
126
selfdrive/test/process_replay/README.md
Normal file
@@ -0,0 +1,126 @@
|
||||
# Process replay
|
||||
|
||||
Process replay is a regression test designed to identify any changes in the output of a process. This test replays a segment through individual processes and compares the output to a known good replay. Each make is represented in the test with a segment.
|
||||
|
||||
If the test fails, make sure that you didn't unintentionally change anything. If there are intentional changes, the reference logs will be updated.
|
||||
|
||||
Use `test_processes.py` to run the test locally.
|
||||
Use `FILEREADER_CACHE='1' test_processes.py` to cache log files.
|
||||
|
||||
Currently the following processes are tested:
|
||||
|
||||
* controlsd
|
||||
* radard
|
||||
* plannerd
|
||||
* calibrationd
|
||||
* dmonitoringd
|
||||
* locationd
|
||||
* paramsd
|
||||
* ubloxd
|
||||
* torqued
|
||||
|
||||
### Usage
|
||||
```
|
||||
Usage: test_processes.py [-h] [--whitelist-procs PROCS] [--whitelist-cars CARS] [--blacklist-procs PROCS]
|
||||
[--blacklist-cars CARS] [--ignore-fields FIELDS] [--ignore-msgs MSGS] [--update-refs] [--upload-only]
|
||||
Regression test to identify changes in a process's output
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--whitelist-procs PROCS Whitelist given processes from the test (e.g. controlsd)
|
||||
--whitelist-cars WHITELIST_CARS Whitelist given cars from the test (e.g. HONDA)
|
||||
--blacklist-procs BLACKLIST_PROCS Blacklist given processes from the test (e.g. controlsd)
|
||||
--blacklist-cars BLACKLIST_CARS Blacklist given cars from the test (e.g. HONDA)
|
||||
--ignore-fields IGNORE_FIELDS Extra fields or msgs to ignore (e.g. carState.events)
|
||||
--ignore-msgs IGNORE_MSGS Msgs to ignore (e.g. onroadEvents)
|
||||
--update-refs Updates reference logs using current commit
|
||||
--upload-only Skips testing processes and uploads logs from previous test run
|
||||
```
|
||||
|
||||
## Forks
|
||||
|
||||
openpilot forks can use this test with their own reference logs, by default `test_proccess.py` saves logs locally.
|
||||
|
||||
To generate new logs:
|
||||
|
||||
`./test_processes.py`
|
||||
|
||||
Then, check in the new logs using git-lfs. Make sure to also update the `ref_commit` file to the current commit.
|
||||
|
||||
## API
|
||||
|
||||
Process replay test suite exposes programmatic APIs for simultaneously running processes or groups of processes on provided logs.
|
||||
|
||||
```py
|
||||
def replay_process_with_name(name: Union[str, Iterable[str]], lr: LogIterable, *args, **kwargs) -> List[capnp._DynamicStructReader]:
|
||||
|
||||
def replay_process(
|
||||
cfg: Union[ProcessConfig, Iterable[ProcessConfig]], lr: LogIterable, frs: Optional[Dict[str, Any]] = None,
|
||||
fingerprint: Optional[str] = None, return_all_logs: bool = False, custom_params: Optional[Dict[str, Any]] = None, disable_progress: bool = False
|
||||
) -> List[capnp._DynamicStructReader]:
|
||||
```
|
||||
|
||||
Example usage:
|
||||
```py
|
||||
from openpilot.selfdrive.test.process_replay import replay_process_with_name
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
|
||||
lr = LogReader(...)
|
||||
|
||||
# provide a name of the process to replay
|
||||
output_logs = replay_process_with_name('locationd', lr)
|
||||
|
||||
# or list of names
|
||||
output_logs = replay_process_with_name(['ubloxd', 'locationd'], lr)
|
||||
```
|
||||
|
||||
Supported processes:
|
||||
* controlsd
|
||||
* radard
|
||||
* plannerd
|
||||
* calibrationd
|
||||
* dmonitoringd
|
||||
* locationd
|
||||
* paramsd
|
||||
* ubloxd
|
||||
* torqued
|
||||
* modeld
|
||||
* dmonitoringmodeld
|
||||
|
||||
Certain processes may require an initial state, which is usually supplied within `Params` and persisting from segment to segment (e.g CalibrationParams, LiveParameters). The `custom_params` is dictionary used to prepopulate `Params` with arbitrary values. The `get_custom_params_from_lr` helper is provided to fetch meaningful values from log files.
|
||||
|
||||
```py
|
||||
from openpilot.selfdrive.test.process_replay import get_custom_params_from_lr
|
||||
|
||||
previous_segment_lr = LogReader(...)
|
||||
current_segment_lr = LogReader(...)
|
||||
|
||||
custom_params = get_custom_params_from_lr(previous_segment_lr, 'last')
|
||||
|
||||
output_logs = replay_process_with_name('calibrationd', lr, custom_params=custom_params)
|
||||
```
|
||||
|
||||
Replaying processes that use VisionIPC (e.g. modeld, dmonitoringmodeld) require additional `frs` dictionary with camera states as keys and `FrameReader` objects as values.
|
||||
|
||||
```py
|
||||
from openpilot.tools.lib.framereader import FrameReader
|
||||
|
||||
frs = {
|
||||
'roadCameraState': FrameReader(...),
|
||||
'wideRoadCameraState': FrameReader(...),
|
||||
'driverCameraState': FrameReader(...),
|
||||
}
|
||||
|
||||
output_logs = replay_process_with_name(['modeld', 'dmonitoringmodeld'], lr, frs=frs)
|
||||
```
|
||||
|
||||
To capture stdout/stderr of the replayed process, `captured_output_store` can be provided.
|
||||
|
||||
```py
|
||||
output_store = dict()
|
||||
# pass dictionary by reference, it will be filled with standard outputs - even if process replay fails
|
||||
output_logs = replay_process_with_name(['radard', 'plannerd'], lr, captured_output_store=output_store)
|
||||
|
||||
# entries with captured output in format { 'out': '...', 'err': '...' } will be added to provided dictionary for each replayed process
|
||||
print(output_store['radard']['out']) # radard stdout
|
||||
print(output_store['radard']['err']) # radard stderr
|
||||
```
|
||||
2
selfdrive/test/process_replay/__init__.py
Normal file
2
selfdrive/test/process_replay/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
from openpilot.selfdrive.test.process_replay.process_replay import CONFIGS, get_process_config, get_custom_params_from_lr, \
|
||||
replay_process, replay_process_with_name # noqa: F401
|
||||
59
selfdrive/test/process_replay/capture.py
Normal file
59
selfdrive/test/process_replay/capture.py
Normal file
@@ -0,0 +1,59 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from typing import no_type_check
|
||||
|
||||
class FdRedirect:
|
||||
def __init__(self, file_prefix: str, fd: int):
|
||||
fname = os.path.join("/tmp", f"{file_prefix}.{fd}")
|
||||
if os.path.exists(fname):
|
||||
os.unlink(fname)
|
||||
self.dest_fd = os.open(fname, os.O_WRONLY | os.O_CREAT)
|
||||
self.dest_fname = fname
|
||||
self.source_fd = fd
|
||||
os.set_inheritable(self.dest_fd, True)
|
||||
|
||||
def __del__(self):
|
||||
os.close(self.dest_fd)
|
||||
|
||||
def purge(self) -> None:
|
||||
os.unlink(self.dest_fname)
|
||||
|
||||
def read(self) -> bytes:
|
||||
with open(self.dest_fname, "rb") as f:
|
||||
return f.read() or b""
|
||||
|
||||
def link(self) -> None:
|
||||
os.dup2(self.dest_fd, self.source_fd)
|
||||
|
||||
|
||||
class ProcessOutputCapture:
|
||||
def __init__(self, proc_name: str, prefix: str):
|
||||
prefix = f"{proc_name}_{prefix}"
|
||||
self.stdout_redirect = FdRedirect(prefix, 1)
|
||||
self.stderr_redirect = FdRedirect(prefix, 2)
|
||||
|
||||
def __del__(self):
|
||||
self.stdout_redirect.purge()
|
||||
self.stderr_redirect.purge()
|
||||
|
||||
@no_type_check # ipython classes have incompatible signatures
|
||||
def link_with_current_proc(self) -> None:
|
||||
try:
|
||||
# prevent ipykernel from redirecting stdout/stderr of python subprocesses
|
||||
from ipykernel.iostream import OutStream
|
||||
if isinstance(sys.stdout, OutStream):
|
||||
sys.stdout = sys.__stdout__
|
||||
if isinstance(sys.stderr, OutStream):
|
||||
sys.stderr = sys.__stderr__
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# link stdout/stderr to the fifo
|
||||
self.stdout_redirect.link()
|
||||
self.stderr_redirect.link()
|
||||
|
||||
def read_outerr(self) -> tuple[str, str]:
|
||||
out_str = self.stdout_redirect.read().decode()
|
||||
err_str = self.stderr_redirect.read().decode()
|
||||
return out_str, err_str
|
||||
150
selfdrive/test/process_replay/compare_logs.py
Normal file
150
selfdrive/test/process_replay/compare_logs.py
Normal file
@@ -0,0 +1,150 @@
|
||||
#!/usr/bin/env python3
|
||||
import sys
|
||||
import math
|
||||
import capnp
|
||||
import numbers
|
||||
import dictdiffer
|
||||
from collections import Counter
|
||||
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
|
||||
EPSILON = sys.float_info.epsilon
|
||||
|
||||
|
||||
def remove_ignored_fields(msg, ignore):
|
||||
msg = msg.as_builder()
|
||||
for key in ignore:
|
||||
attr = msg
|
||||
keys = key.split(".")
|
||||
if msg.which() != keys[0] and len(keys) > 1:
|
||||
continue
|
||||
|
||||
for k in keys[:-1]:
|
||||
# indexing into list
|
||||
if k.isdigit():
|
||||
attr = attr[int(k)]
|
||||
else:
|
||||
attr = getattr(attr, k)
|
||||
|
||||
v = getattr(attr, keys[-1])
|
||||
if isinstance(v, bool):
|
||||
val = False
|
||||
elif isinstance(v, numbers.Number):
|
||||
val = 0
|
||||
elif isinstance(v, (list, capnp.lib.capnp._DynamicListBuilder)):
|
||||
val = []
|
||||
else:
|
||||
raise NotImplementedError(f"Unknown type: {type(v)}")
|
||||
setattr(attr, keys[-1], val)
|
||||
return msg
|
||||
|
||||
|
||||
def compare_logs(log1, log2, ignore_fields=None, ignore_msgs=None, tolerance=None,):
|
||||
if ignore_fields is None:
|
||||
ignore_fields = []
|
||||
if ignore_msgs is None:
|
||||
ignore_msgs = []
|
||||
tolerance = EPSILON if tolerance is None else tolerance
|
||||
|
||||
log1, log2 = (
|
||||
[m for m in log if m.which() not in ignore_msgs]
|
||||
for log in (log1, log2)
|
||||
)
|
||||
|
||||
if len(log1) != len(log2):
|
||||
cnt1 = Counter(m.which() for m in log1)
|
||||
cnt2 = Counter(m.which() for m in log2)
|
||||
raise Exception(f"logs are not same length: {len(log1)} VS {len(log2)}\n\t\t{cnt1}\n\t\t{cnt2}")
|
||||
|
||||
diff = []
|
||||
for msg1, msg2 in zip(log1, log2, strict=True):
|
||||
if msg1.which() != msg2.which():
|
||||
raise Exception("msgs not aligned between logs")
|
||||
|
||||
msg1 = remove_ignored_fields(msg1, ignore_fields)
|
||||
msg2 = remove_ignored_fields(msg2, ignore_fields)
|
||||
|
||||
if msg1.to_bytes() != msg2.to_bytes():
|
||||
msg1_dict = msg1.as_reader().to_dict(verbose=True)
|
||||
msg2_dict = msg2.as_reader().to_dict(verbose=True)
|
||||
|
||||
dd = dictdiffer.diff(msg1_dict, msg2_dict, ignore=ignore_fields)
|
||||
|
||||
# Dictdiffer only supports relative tolerance, we also want to check for absolute
|
||||
# TODO: add this to dictdiffer
|
||||
def outside_tolerance(diff):
|
||||
try:
|
||||
if diff[0] == "change":
|
||||
a, b = diff[2]
|
||||
finite = math.isfinite(a) and math.isfinite(b)
|
||||
if finite and isinstance(a, numbers.Number) and isinstance(b, numbers.Number):
|
||||
return abs(a - b) > max(tolerance, tolerance * max(abs(a), abs(b)))
|
||||
except TypeError:
|
||||
pass
|
||||
return True
|
||||
|
||||
dd = list(filter(outside_tolerance, dd))
|
||||
|
||||
diff.extend(dd)
|
||||
return diff
|
||||
|
||||
|
||||
def format_process_diff(diff):
|
||||
diff_short, diff_long = "", ""
|
||||
|
||||
if isinstance(diff, str):
|
||||
diff_short += f" {diff}\n"
|
||||
diff_long += f"\t{diff}\n"
|
||||
else:
|
||||
cnt: dict[str, int] = {}
|
||||
for d in diff:
|
||||
diff_long += f"\t{str(d)}\n"
|
||||
|
||||
k = str(d[1])
|
||||
cnt[k] = 1 if k not in cnt else cnt[k] + 1
|
||||
|
||||
for k, v in sorted(cnt.items()):
|
||||
diff_short += f" {k}: {v}\n"
|
||||
|
||||
return diff_short, diff_long
|
||||
|
||||
|
||||
def format_diff(results, log_paths, ref_commit):
|
||||
diff_short, diff_long = "", ""
|
||||
diff_long += f"***** tested against commit {ref_commit} *****\n"
|
||||
|
||||
failed = False
|
||||
for segment, result in list(results.items()):
|
||||
diff_short += f"***** results for segment {segment} *****\n"
|
||||
diff_long += f"***** differences for segment {segment} *****\n"
|
||||
|
||||
for proc, diff in list(result.items()):
|
||||
diff_long += f"*** process: {proc} ***\n"
|
||||
diff_long += f"\tref: {log_paths[segment][proc]['ref']}\n"
|
||||
diff_long += f"\tnew: {log_paths[segment][proc]['new']}\n\n"
|
||||
|
||||
diff_short += f" {proc}\n"
|
||||
|
||||
if isinstance(diff, str) or len(diff):
|
||||
diff_short += f" ref: {log_paths[segment][proc]['ref']}\n"
|
||||
diff_short += f" new: {log_paths[segment][proc]['new']}\n\n"
|
||||
failed = True
|
||||
|
||||
proc_diff_short, proc_diff_long = format_process_diff(diff)
|
||||
|
||||
diff_long += proc_diff_long
|
||||
diff_short += proc_diff_short
|
||||
|
||||
return diff_short, diff_long, failed
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
log1 = list(LogReader(sys.argv[1]))
|
||||
log2 = list(LogReader(sys.argv[2]))
|
||||
ignore_fields = sys.argv[3:] or ["logMonoTime", "controlsState.startMonoTime", "controlsState.cumLagMs"]
|
||||
results = {"segment": {"proc": compare_logs(log1, log2, ignore_fields)}}
|
||||
log_paths = {"segment": {"proc": {"ref": sys.argv[1], "new": sys.argv[2]}}}
|
||||
diff_short, diff_long, failed = format_diff(results, log_paths, None)
|
||||
|
||||
print(diff_long)
|
||||
print(diff_short)
|
||||
1
selfdrive/test/process_replay/debayer_replay_ref_commit
Normal file
1
selfdrive/test/process_replay/debayer_replay_ref_commit
Normal file
@@ -0,0 +1 @@
|
||||
8f9ba7540b4549b4a57312129b8ff678d045f70f
|
||||
203
selfdrive/test/process_replay/migration.py
Normal file
203
selfdrive/test/process_replay/migration.py
Normal file
@@ -0,0 +1,203 @@
|
||||
from collections import defaultdict
|
||||
|
||||
from cereal import messaging
|
||||
from openpilot.selfdrive.test.process_replay.vision_meta import meta_from_encode_index
|
||||
from openpilot.selfdrive.car.toyota.values import EPS_SCALE
|
||||
from openpilot.selfdrive.manager.process_config import managed_processes
|
||||
from panda import Panda
|
||||
|
||||
|
||||
def migrate_all(lr, old_logtime=False, manager_states=False, panda_states=False, camera_states=False):
|
||||
msgs = migrate_sensorEvents(lr, old_logtime)
|
||||
msgs = migrate_carParams(msgs, old_logtime)
|
||||
if manager_states:
|
||||
msgs = migrate_managerState(msgs)
|
||||
if panda_states:
|
||||
msgs = migrate_pandaStates(msgs)
|
||||
msgs = migrate_peripheralState(msgs)
|
||||
if camera_states:
|
||||
msgs = migrate_cameraStates(msgs)
|
||||
|
||||
return msgs
|
||||
|
||||
|
||||
def migrate_managerState(lr):
|
||||
all_msgs = []
|
||||
for msg in lr:
|
||||
if msg.which() != "managerState":
|
||||
all_msgs.append(msg)
|
||||
continue
|
||||
|
||||
new_msg = msg.as_builder()
|
||||
new_msg.managerState.processes = [{'name': name, 'running': True} for name in managed_processes]
|
||||
all_msgs.append(new_msg.as_reader())
|
||||
|
||||
return all_msgs
|
||||
|
||||
|
||||
def migrate_pandaStates(lr):
|
||||
all_msgs = []
|
||||
# TODO: safety param migration should be handled automatically
|
||||
safety_param_migration = {
|
||||
"TOYOTA PRIUS 2017": EPS_SCALE["TOYOTA PRIUS 2017"] | Panda.FLAG_TOYOTA_STOCK_LONGITUDINAL,
|
||||
"TOYOTA RAV4 2017": EPS_SCALE["TOYOTA RAV4 2017"] | Panda.FLAG_TOYOTA_ALT_BRAKE | Panda.FLAG_TOYOTA_GAS_INTERCEPTOR,
|
||||
"KIA EV6 2022": Panda.FLAG_HYUNDAI_EV_GAS | Panda.FLAG_HYUNDAI_CANFD_HDA2,
|
||||
}
|
||||
|
||||
# Migrate safety param base on carState
|
||||
CP = next((m.carParams for m in lr if m.which() == 'carParams'), None)
|
||||
assert CP is not None, "carParams message not found"
|
||||
if CP.carFingerprint in safety_param_migration:
|
||||
safety_param = safety_param_migration[CP.carFingerprint]
|
||||
elif len(CP.safetyConfigs):
|
||||
safety_param = CP.safetyConfigs[0].safetyParam
|
||||
if CP.safetyConfigs[0].safetyParamDEPRECATED != 0:
|
||||
safety_param = CP.safetyConfigs[0].safetyParamDEPRECATED
|
||||
else:
|
||||
safety_param = CP.safetyParamDEPRECATED
|
||||
|
||||
for msg in lr:
|
||||
if msg.which() == 'pandaStateDEPRECATED':
|
||||
new_msg = messaging.new_message('pandaStates', 1)
|
||||
new_msg.valid = msg.valid
|
||||
new_msg.logMonoTime = msg.logMonoTime
|
||||
new_msg.pandaStates[0] = msg.pandaStateDEPRECATED
|
||||
new_msg.pandaStates[0].safetyParam = safety_param
|
||||
all_msgs.append(new_msg.as_reader())
|
||||
elif msg.which() == 'pandaStates':
|
||||
new_msg = msg.as_builder()
|
||||
new_msg.pandaStates[-1].safetyParam = safety_param
|
||||
all_msgs.append(new_msg.as_reader())
|
||||
else:
|
||||
all_msgs.append(msg)
|
||||
|
||||
return all_msgs
|
||||
|
||||
|
||||
def migrate_peripheralState(lr):
|
||||
if any(msg.which() == "peripheralState" for msg in lr):
|
||||
return lr
|
||||
|
||||
all_msg = []
|
||||
for msg in lr:
|
||||
all_msg.append(msg)
|
||||
if msg.which() not in ["pandaStates", "pandaStateDEPRECATED"]:
|
||||
continue
|
||||
|
||||
new_msg = messaging.new_message("peripheralState")
|
||||
new_msg.valid = msg.valid
|
||||
new_msg.logMonoTime = msg.logMonoTime
|
||||
all_msg.append(new_msg.as_reader())
|
||||
|
||||
return all_msg
|
||||
|
||||
|
||||
def migrate_cameraStates(lr):
|
||||
all_msgs = []
|
||||
frame_to_encode_id = defaultdict(dict)
|
||||
# just for encodeId fallback mechanism
|
||||
min_frame_id = defaultdict(lambda: float('inf'))
|
||||
|
||||
for msg in lr:
|
||||
if msg.which() not in ["roadEncodeIdx", "wideRoadEncodeIdx", "driverEncodeIdx"]:
|
||||
continue
|
||||
|
||||
encode_index = getattr(msg, msg.which())
|
||||
meta = meta_from_encode_index(msg.which())
|
||||
|
||||
assert encode_index.segmentId < 1200, f"Encoder index segmentId greater that 1200: {msg.which()} {encode_index.segmentId}"
|
||||
frame_to_encode_id[meta.camera_state][encode_index.frameId] = encode_index.segmentId
|
||||
|
||||
for msg in lr:
|
||||
if msg.which() not in ["roadCameraState", "wideRoadCameraState", "driverCameraState"]:
|
||||
all_msgs.append(msg)
|
||||
continue
|
||||
|
||||
camera_state = getattr(msg, msg.which())
|
||||
min_frame_id[msg.which()] = min(min_frame_id[msg.which()], camera_state.frameId)
|
||||
|
||||
encode_id = frame_to_encode_id[msg.which()].get(camera_state.frameId)
|
||||
if encode_id is None:
|
||||
print(f"Missing encoded frame for camera feed {msg.which()} with frameId: {camera_state.frameId}")
|
||||
if len(frame_to_encode_id[msg.which()]) != 0:
|
||||
continue
|
||||
|
||||
# fallback mechanism for logs without encodeIdx (e.g. logs from before 2022 with dcamera recording disabled)
|
||||
# try to fake encode_id by subtracting lowest frameId
|
||||
encode_id = camera_state.frameId - min_frame_id[msg.which()]
|
||||
print(f"Faking encodeId to {encode_id} for camera feed {msg.which()} with frameId: {camera_state.frameId}")
|
||||
|
||||
new_msg = messaging.new_message(msg.which())
|
||||
new_camera_state = getattr(new_msg, new_msg.which())
|
||||
new_camera_state.frameId = encode_id
|
||||
new_camera_state.encodeId = encode_id
|
||||
# timestampSof was added later so it might be missing on some old segments
|
||||
if camera_state.timestampSof == 0 and camera_state.timestampEof > 25000000:
|
||||
new_camera_state.timestampSof = camera_state.timestampEof - 18000000
|
||||
else:
|
||||
new_camera_state.timestampSof = camera_state.timestampSof
|
||||
new_camera_state.timestampEof = camera_state.timestampEof
|
||||
new_msg.logMonoTime = msg.logMonoTime
|
||||
new_msg.valid = msg.valid
|
||||
|
||||
all_msgs.append(new_msg.as_reader())
|
||||
|
||||
return all_msgs
|
||||
|
||||
|
||||
def migrate_carParams(lr, old_logtime=False):
|
||||
all_msgs = []
|
||||
for msg in lr:
|
||||
if msg.which() == 'carParams':
|
||||
CP = messaging.new_message('carParams')
|
||||
CP.valid = True
|
||||
CP.carParams = msg.carParams.as_builder()
|
||||
for car_fw in CP.carParams.carFw:
|
||||
car_fw.brand = CP.carParams.carName
|
||||
if old_logtime:
|
||||
CP.logMonoTime = msg.logMonoTime
|
||||
msg = CP.as_reader()
|
||||
all_msgs.append(msg)
|
||||
|
||||
return all_msgs
|
||||
|
||||
|
||||
def migrate_sensorEvents(lr, old_logtime=False):
|
||||
all_msgs = []
|
||||
for msg in lr:
|
||||
if msg.which() != 'sensorEventsDEPRECATED':
|
||||
all_msgs.append(msg)
|
||||
continue
|
||||
|
||||
# migrate to split sensor events
|
||||
for evt in msg.sensorEventsDEPRECATED:
|
||||
# build new message for each sensor type
|
||||
sensor_service = ''
|
||||
if evt.which() == 'acceleration':
|
||||
sensor_service = 'accelerometer'
|
||||
elif evt.which() == 'gyro' or evt.which() == 'gyroUncalibrated':
|
||||
sensor_service = 'gyroscope'
|
||||
elif evt.which() == 'light' or evt.which() == 'proximity':
|
||||
sensor_service = 'lightSensor'
|
||||
elif evt.which() == 'magnetic' or evt.which() == 'magneticUncalibrated':
|
||||
sensor_service = 'magnetometer'
|
||||
elif evt.which() == 'temperature':
|
||||
sensor_service = 'temperatureSensor'
|
||||
|
||||
m = messaging.new_message(sensor_service)
|
||||
m.valid = True
|
||||
if old_logtime:
|
||||
m.logMonoTime = msg.logMonoTime
|
||||
|
||||
m_dat = getattr(m, sensor_service)
|
||||
m_dat.version = evt.version
|
||||
m_dat.sensor = evt.sensor
|
||||
m_dat.type = evt.type
|
||||
m_dat.source = evt.source
|
||||
if old_logtime:
|
||||
m_dat.timestamp = evt.timestamp
|
||||
setattr(m_dat, evt.which(), getattr(evt, evt.which()))
|
||||
|
||||
all_msgs.append(m.as_reader())
|
||||
|
||||
return all_msgs
|
||||
249
selfdrive/test/process_replay/model_replay.py
Normal file
249
selfdrive/test/process_replay/model_replay.py
Normal file
@@ -0,0 +1,249 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from typing import Any
|
||||
|
||||
import cereal.messaging as messaging
|
||||
from openpilot.common.params import Params
|
||||
from openpilot.system.hardware import PC
|
||||
from openpilot.selfdrive.manager.process_config import managed_processes
|
||||
from openpilot.tools.lib.openpilotci import BASE_URL, get_url
|
||||
from openpilot.selfdrive.test.process_replay.compare_logs import compare_logs, format_diff
|
||||
from openpilot.selfdrive.test.process_replay.process_replay import get_process_config, replay_process
|
||||
from openpilot.system.version import get_commit
|
||||
from openpilot.tools.lib.framereader import FrameReader
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
from openpilot.tools.lib.helpers import save_log
|
||||
|
||||
TEST_ROUTE = "2f4452b03ccb98f0|2022-12-03--13-45-30"
|
||||
SEGMENT = 6
|
||||
MAX_FRAMES = 100 if PC else 600
|
||||
NAV_FRAMES = 50
|
||||
|
||||
NO_NAV = "NO_NAV" in os.environ
|
||||
NO_MODEL = "NO_MODEL" in os.environ
|
||||
SEND_EXTRA_INPUTS = bool(int(os.getenv("SEND_EXTRA_INPUTS", "0")))
|
||||
|
||||
|
||||
def get_log_fn(ref_commit, test_route):
|
||||
return f"{test_route}_model_tici_{ref_commit}.bz2"
|
||||
|
||||
|
||||
def trim_logs_to_max_frames(logs, max_frames, frs_types, include_all_types):
|
||||
all_msgs = []
|
||||
cam_state_counts = defaultdict(int)
|
||||
# keep adding messages until cam states are equal to MAX_FRAMES
|
||||
for msg in sorted(logs, key=lambda m: m.logMonoTime):
|
||||
all_msgs.append(msg)
|
||||
if msg.which() in frs_types:
|
||||
cam_state_counts[msg.which()] += 1
|
||||
|
||||
if all(cam_state_counts[state] == max_frames for state in frs_types):
|
||||
break
|
||||
|
||||
if len(include_all_types) != 0:
|
||||
other_msgs = [m for m in logs if m.which() in include_all_types]
|
||||
all_msgs.extend(other_msgs)
|
||||
|
||||
return all_msgs
|
||||
|
||||
|
||||
def nav_model_replay(lr):
|
||||
sm = messaging.SubMaster(['navModel', 'navThumbnail', 'mapRenderState'])
|
||||
pm = messaging.PubMaster(['liveLocationKalman', 'navRoute'])
|
||||
|
||||
nav = [m for m in lr if m.which() == 'navRoute']
|
||||
llk = [m for m in lr if m.which() == 'liveLocationKalman']
|
||||
assert len(nav) > 0 and len(llk) >= NAV_FRAMES and nav[0].logMonoTime < llk[-NAV_FRAMES].logMonoTime
|
||||
|
||||
log_msgs = []
|
||||
try:
|
||||
assert "MAPBOX_TOKEN" in os.environ
|
||||
os.environ['MAP_RENDER_TEST_MODE'] = '1'
|
||||
Params().put_bool('DmModelInitialized', True)
|
||||
managed_processes['mapsd'].start()
|
||||
managed_processes['navmodeld'].start()
|
||||
|
||||
# setup position and route
|
||||
for _ in range(10):
|
||||
for s in (llk[-NAV_FRAMES], nav[0]):
|
||||
pm.send(s.which(), s.as_builder().to_bytes())
|
||||
sm.update(1000)
|
||||
if sm.updated['navModel']:
|
||||
break
|
||||
time.sleep(1)
|
||||
|
||||
if not sm.updated['navModel']:
|
||||
raise Exception("no navmodeld outputs, failed to initialize")
|
||||
|
||||
# drain
|
||||
time.sleep(2)
|
||||
sm.update(0)
|
||||
|
||||
# run replay
|
||||
for n in range(len(llk) - NAV_FRAMES, len(llk)):
|
||||
pm.send(llk[n].which(), llk[n].as_builder().to_bytes())
|
||||
m = messaging.recv_one(sm.sock['navThumbnail'])
|
||||
assert m is not None, f"no navThumbnail, frame={n}"
|
||||
log_msgs.append(m)
|
||||
|
||||
m = messaging.recv_one(sm.sock['mapRenderState'])
|
||||
assert m is not None, f"no mapRenderState, frame={n}"
|
||||
log_msgs.append(m)
|
||||
|
||||
m = messaging.recv_one(sm.sock['navModel'])
|
||||
assert m is not None, f"no navModel response, frame={n}"
|
||||
log_msgs.append(m)
|
||||
finally:
|
||||
managed_processes['mapsd'].stop()
|
||||
managed_processes['navmodeld'].stop()
|
||||
|
||||
return log_msgs
|
||||
|
||||
|
||||
def model_replay(lr, frs):
|
||||
# modeld is using frame pairs
|
||||
modeld_logs = trim_logs_to_max_frames(lr, MAX_FRAMES, {"roadCameraState", "wideRoadCameraState"}, {"roadEncodeIdx", "wideRoadEncodeIdx", "carParams"})
|
||||
dmodeld_logs = trim_logs_to_max_frames(lr, MAX_FRAMES, {"driverCameraState"}, {"driverEncodeIdx", "carParams"})
|
||||
if not SEND_EXTRA_INPUTS:
|
||||
modeld_logs = [msg for msg in modeld_logs if msg.which() not in ["liveCalibration",]]
|
||||
dmodeld_logs = [msg for msg in dmodeld_logs if msg.which() not in ["liveCalibration",]]
|
||||
# initial calibration
|
||||
cal_msg = next(msg for msg in lr if msg.which() == "liveCalibration").as_builder()
|
||||
cal_msg.logMonoTime = lr[0].logMonoTime
|
||||
modeld_logs.insert(0, cal_msg.as_reader())
|
||||
dmodeld_logs.insert(0, cal_msg.as_reader())
|
||||
|
||||
modeld = get_process_config("modeld")
|
||||
dmonitoringmodeld = get_process_config("dmonitoringmodeld")
|
||||
|
||||
modeld_msgs = replay_process(modeld, modeld_logs, frs)
|
||||
dmonitoringmodeld_msgs = replay_process(dmonitoringmodeld, dmodeld_logs, frs)
|
||||
return modeld_msgs + dmonitoringmodeld_msgs
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
update = "--update" in sys.argv
|
||||
replay_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
ref_commit_fn = os.path.join(replay_dir, "model_replay_ref_commit")
|
||||
|
||||
# load logs
|
||||
lr = list(LogReader(get_url(TEST_ROUTE, SEGMENT)))
|
||||
frs = {
|
||||
'roadCameraState': FrameReader(get_url(TEST_ROUTE, SEGMENT, log_type="fcamera"), readahead=True),
|
||||
'driverCameraState': FrameReader(get_url(TEST_ROUTE, SEGMENT, log_type="dcamera"), readahead=True),
|
||||
'wideRoadCameraState': FrameReader(get_url(TEST_ROUTE, SEGMENT, log_type="ecamera"), readahead=True)
|
||||
}
|
||||
|
||||
# Update tile refs
|
||||
if update:
|
||||
import urllib
|
||||
import requests
|
||||
import threading
|
||||
import http.server
|
||||
from openpilot.tools.lib.openpilotci import upload_bytes
|
||||
os.environ['MAPS_HOST'] = 'http://localhost:5000'
|
||||
|
||||
class HTTPRequestHandler(http.server.BaseHTTPRequestHandler):
|
||||
def do_GET(self):
|
||||
assert len(self.path) > 10 # Sanity check on path length
|
||||
r = requests.get(f'https://api.mapbox.com{self.path}', timeout=30)
|
||||
upload_bytes(r.content, urllib.parse.urlparse(self.path).path.lstrip('/'))
|
||||
self.send_response(r.status_code)
|
||||
self.send_header('Content-type','text/html')
|
||||
self.end_headers()
|
||||
self.wfile.write(r.content)
|
||||
|
||||
server = http.server.HTTPServer(("127.0.0.1", 5000), HTTPRequestHandler)
|
||||
thread = threading.Thread(None, server.serve_forever, daemon=True)
|
||||
thread.start()
|
||||
else:
|
||||
os.environ['MAPS_HOST'] = BASE_URL.rstrip('/')
|
||||
|
||||
log_msgs = []
|
||||
# run replays
|
||||
if not NO_MODEL:
|
||||
log_msgs += model_replay(lr, frs)
|
||||
if not NO_NAV:
|
||||
log_msgs += nav_model_replay(lr)
|
||||
|
||||
# get diff
|
||||
failed = False
|
||||
if not update:
|
||||
with open(ref_commit_fn) as f:
|
||||
ref_commit = f.read().strip()
|
||||
log_fn = get_log_fn(ref_commit, TEST_ROUTE)
|
||||
try:
|
||||
all_logs = list(LogReader(BASE_URL + log_fn))
|
||||
cmp_log = []
|
||||
|
||||
# logs are ordered based on type: modelV2, driverStateV2, nav messages (navThumbnail, mapRenderState, navModel)
|
||||
if not NO_MODEL:
|
||||
model_start_index = next(i for i, m in enumerate(all_logs) if m.which() in ("modelV2", "cameraOdometry"))
|
||||
cmp_log += all_logs[model_start_index:model_start_index + MAX_FRAMES*2]
|
||||
dmon_start_index = next(i for i, m in enumerate(all_logs) if m.which() == "driverStateV2")
|
||||
cmp_log += all_logs[dmon_start_index:dmon_start_index + MAX_FRAMES]
|
||||
if not NO_NAV:
|
||||
nav_start_index = next(i for i, m in enumerate(all_logs) if m.which() in ["navThumbnail", "mapRenderState", "navModel"])
|
||||
nav_logs = all_logs[nav_start_index:nav_start_index + NAV_FRAMES*3]
|
||||
cmp_log += nav_logs
|
||||
|
||||
ignore = [
|
||||
'logMonoTime',
|
||||
'modelV2.frameDropPerc',
|
||||
'modelV2.modelExecutionTime',
|
||||
'driverStateV2.modelExecutionTime',
|
||||
'driverStateV2.dspExecutionTime',
|
||||
'navModel.dspExecutionTime',
|
||||
'navModel.modelExecutionTime',
|
||||
'navThumbnail.timestampEof',
|
||||
'mapRenderState.locationMonoTime',
|
||||
'mapRenderState.renderTime',
|
||||
]
|
||||
if PC:
|
||||
ignore += [
|
||||
'modelV2.laneLines.0.t',
|
||||
'modelV2.laneLines.1.t',
|
||||
'modelV2.laneLines.2.t',
|
||||
'modelV2.laneLines.3.t',
|
||||
'modelV2.roadEdges.0.t',
|
||||
'modelV2.roadEdges.1.t',
|
||||
]
|
||||
# TODO this tolerance is absurdly large
|
||||
tolerance = 2.0 if PC else None
|
||||
results: Any = {TEST_ROUTE: {}}
|
||||
log_paths: Any = {TEST_ROUTE: {"models": {'ref': BASE_URL + log_fn, 'new': log_fn}}}
|
||||
results[TEST_ROUTE]["models"] = compare_logs(cmp_log, log_msgs, tolerance=tolerance, ignore_fields=ignore)
|
||||
diff_short, diff_long, failed = format_diff(results, log_paths, ref_commit)
|
||||
|
||||
print(diff_long)
|
||||
print('-------------\n'*5)
|
||||
print(diff_short)
|
||||
with open("model_diff.txt", "w") as f:
|
||||
f.write(diff_long)
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
failed = True
|
||||
|
||||
# upload new refs
|
||||
if (update or failed) and not PC:
|
||||
from openpilot.tools.lib.openpilotci import upload_file
|
||||
|
||||
print("Uploading new refs")
|
||||
|
||||
new_commit = get_commit()
|
||||
log_fn = get_log_fn(new_commit, TEST_ROUTE)
|
||||
save_log(log_fn, log_msgs)
|
||||
try:
|
||||
upload_file(log_fn, os.path.basename(log_fn))
|
||||
except Exception as e:
|
||||
print("failed to upload", e)
|
||||
|
||||
with open(ref_commit_fn, 'w') as f:
|
||||
f.write(str(new_commit))
|
||||
|
||||
print("\n\nNew ref commit: ", new_commit)
|
||||
|
||||
sys.exit(int(failed))
|
||||
1
selfdrive/test/process_replay/model_replay_ref_commit
Normal file
1
selfdrive/test/process_replay/model_replay_ref_commit
Normal file
@@ -0,0 +1 @@
|
||||
e8b359a82316e6dfce3b6fb0fb9684431bfa0a1b
|
||||
800
selfdrive/test/process_replay/process_replay.py
Normal file
800
selfdrive/test/process_replay/process_replay.py
Normal file
@@ -0,0 +1,800 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import time
|
||||
import copy
|
||||
import json
|
||||
import heapq
|
||||
import signal
|
||||
import platform
|
||||
from collections import OrderedDict
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any
|
||||
from collections.abc import Callable, Iterable
|
||||
from tqdm import tqdm
|
||||
import capnp
|
||||
|
||||
import cereal.messaging as messaging
|
||||
from cereal import car
|
||||
from cereal.services import SERVICE_LIST
|
||||
from cereal.visionipc import VisionIpcServer, get_endpoint_name as vipc_get_endpoint_name
|
||||
from openpilot.common.params import Params
|
||||
from openpilot.common.prefix import OpenpilotPrefix
|
||||
from openpilot.common.timeout import Timeout
|
||||
from openpilot.common.realtime import DT_CTRL
|
||||
from panda.python import ALTERNATIVE_EXPERIENCE
|
||||
from openpilot.selfdrive.car.car_helpers import get_car, interfaces
|
||||
from openpilot.selfdrive.manager.process_config import managed_processes
|
||||
from openpilot.selfdrive.test.process_replay.vision_meta import meta_from_camera_state, available_streams
|
||||
from openpilot.selfdrive.test.process_replay.migration import migrate_all
|
||||
from openpilot.selfdrive.test.process_replay.capture import ProcessOutputCapture
|
||||
from openpilot.tools.lib.logreader import LogIterable
|
||||
from openpilot.tools.lib.framereader import BaseFrameReader
|
||||
|
||||
# Numpy gives different results based on CPU features after version 19
NUMPY_TOLERANCE = 1e-7
# Directory containing this file; fakedata/ under it holds generated segment data (gitignored).
PROC_REPLAY_DIR = os.path.dirname(os.path.abspath(__file__))
FAKEDATA = os.path.join(PROC_REPLAY_DIR, "fakedata/")
|
||||
|
||||
class DummySocket:
|
||||
def __init__(self):
|
||||
self.data: list[bytes] = []
|
||||
|
||||
def receive(self, non_blocking: bool = False) -> bytes | None:
|
||||
if non_blocking:
|
||||
return None
|
||||
|
||||
return self.data.pop()
|
||||
|
||||
def send(self, data: bytes):
|
||||
self.data.append(data)
|
||||
|
||||
class LauncherWithCapture:
  """Wraps a process launcher so output capture is attached before the process entry point runs."""

  def __init__(self, capture: ProcessOutputCapture, launcher: Callable):
    self.capture = capture
    self.launcher = launcher

  def __call__(self, *args, **kwargs):
    # NOTE(review): presumably invoked inside the spawned process — confirm against manager's launcher contract.
    self.capture.link_with_current_proc()
    self.launcher(*args, **kwargs)
|
||||
|
||||
|
||||
class ReplayContext:
  """Synchronizes message delivery with a replayed process via cereal's fake events.

  Each pub (or the single main_pub) gets a fake event handle with a
  recv_called/recv_ready event pair: the process signals recv_called when it
  blocks on a socket, and we signal recv_ready once the next message is sent.
  """

  def __init__(self, cfg):
    self.proc_name = cfg.proc_name
    self.pubs = cfg.pubs
    self.main_pub = cfg.main_pub
    self.main_pub_drained = cfg.main_pub_drained
    self.unlocked_pubs = cfg.unlocked_pubs
    # Without any synchronized pub there is nothing to coordinate on.
    assert(len(self.pubs) != 0 or self.main_pub is not None)

  def __enter__(self):
    self.open_context()

    return self

  def __exit__(self, exc_type, exc_obj, exc_tb):
    self.close_context()

  def open_context(self):
    # Enable fake (process-replay) event mode, namespaced by process name.
    messaging.toggle_fake_events(True)
    messaging.set_fake_prefix(self.proc_name)

    if self.main_pub is None:
      # One event handle per pub, except pubs explicitly left unlocked.
      self.events = OrderedDict()
      pubs_with_events = [pub for pub in self.pubs if pub not in self.unlocked_pubs]
      for pub in pubs_with_events:
        self.events[pub] = messaging.fake_event_handle(pub, enable=True)
    else:
      # Single driving pub (e.g. "can" or a vision stream endpoint).
      self.events = {self.main_pub: messaging.fake_event_handle(self.main_pub, enable=True)}

  def close_context(self):
    del self.events

    messaging.toggle_fake_events(False)
    messaging.delete_fake_prefix()

  @property
  def all_recv_called_events(self):
    # Events set by the process when it calls recv on each socket.
    return [man.recv_called_event for man in self.events.values()]

  @property
  def all_recv_ready_events(self):
    # Events we set to release the process's pending recv.
    return [man.recv_ready_event for man in self.events.values()]

  def send_sync(self, pm, endpoint, dat):
    """Send dat on endpoint only after the process has blocked on it, then release the recv."""
    self.events[endpoint].recv_called_event.wait()
    self.events[endpoint].recv_called_event.clear()
    pm.send(endpoint, dat)
    self.events[endpoint].recv_ready_event.set()

  def unlock_sockets(self):
    """Release one pending recv per tracked socket (all sockets exactly once)."""
    expected_sets = len(self.events)
    while expected_sets > 0:
      index = messaging.wait_for_one_event(self.all_recv_called_events)
      self.all_recv_called_events[index].clear()
      self.all_recv_ready_events[index].set()
      expected_sets -= 1

  def wait_for_recv_called(self):
    # Block until the process is waiting on at least one socket.
    messaging.wait_for_one_event(self.all_recv_called_events)

  def wait_for_next_recv(self, trigger_empty_recv):
    """Wait until the process comes back around to recv, i.e. it finished processing.

    For a drained main_pub, optionally let one empty recv through
    (trigger_empty_recv) so the process observes the end of the message batch.
    """
    index = messaging.wait_for_one_event(self.all_recv_called_events)
    if self.main_pub is not None and self.main_pub_drained and trigger_empty_recv:
      self.all_recv_called_events[index].clear()
      self.all_recv_ready_events[index].set()
      self.all_recv_called_events[index].wait()
|
||||
|
||||
|
||||
@dataclass
class ProcessConfig:
  """Declarative description of how to replay one openpilot process."""
  proc_name: str  # key into manager's managed_processes
  pubs: list[str]  # services replayed from the log into the process
  subs: list[str]  # services produced by the process (captured as output)
  ignore: list[str]  # message fields excluded when comparing output to reference logs
  config_callback: Callable | None = None  # (params, cfg, lr) hook run before start
  init_callback: Callable | None = None  # (rc, pm, msgs, fingerprint) hook run after start, e.g. fingerprinting
  should_recv_callback: Callable | None = None  # (msg, cfg, frame) -> bool, marks end of a processing cycle
  tolerance: float | None = None  # numeric tolerance for output comparison
  processing_time: float = 0.001  # seconds added to output logMonoTime to model processing latency
  timeout: int = 30  # per-step timeout (seconds)
  simulation: bool = True  # run the process with SIMULATION=1
  main_pub: str | None = None  # single pub that drives the process, if any
  main_pub_drained: bool = True  # main_pub is read with drain semantics (empty recv ends a batch)
  vision_pubs: list[str] = field(default_factory=list)  # camera states that need VisionIPC frames
  ignore_alive_pubs: list[str] = field(default_factory=list)  # pubs not awaited during startup
  unlocked_pubs: list[str] = field(default_factory=list)  # pubs exempt from fake-event synchronization
|
||||
|
||||
|
||||
class ProcessContainer:
  """Owns one replayed process: its prefix, params/env, sockets, vision server and stepping."""

  def __init__(self, cfg: ProcessConfig):
    # Each container runs in its own OpenpilotPrefix so params/msgq don't collide.
    self.prefix = OpenpilotPrefix(clean_dirs_on_exit=False)
    self.cfg = copy.deepcopy(cfg)
    self.process = copy.deepcopy(managed_processes[cfg.proc_name])
    self.msg_queue: list[capnp._DynamicStructReader] = []  # messages buffered until end of cycle
    self.cnt = 0  # number of completed processing cycles ("frame" passed to callbacks)
    self.pm: messaging.PubMaster | None = None
    self.sockets: list[messaging.SubSocket] | None = None
    self.rc: ReplayContext | None = None
    self.vipc_server: VisionIpcServer | None = None
    self.environ_config: dict[str, Any] | None = None
    self.capture: ProcessOutputCapture | None = None

  @property
  def has_empty_queue(self) -> bool:
    return len(self.msg_queue) == 0

  @property
  def pubs(self) -> list[str]:
    return self.cfg.pubs

  @property
  def subs(self) -> list[str]:
    return self.cfg.subs

  def _clean_env(self):
    # Remove every env var we set in _setup_env.
    for k in self.environ_config.keys():
      if k in os.environ:
        del os.environ[k]

    for k in ["PROC_NAME", "SIMULATION"]:
      if k in os.environ:
        del os.environ[k]

  def _setup_env(self, params_config: dict[str, Any], environ_config: dict[str, Any]):
    # Empty string means "make sure this var is unset".
    for k, v in environ_config.items():
      if len(v) != 0:
        os.environ[k] = v
      elif k in os.environ:
        del os.environ[k]

    os.environ["PROC_NAME"] = self.cfg.proc_name
    if self.cfg.simulation:
      os.environ["SIMULATION"] = "1"
    elif "SIMULATION" in os.environ:
      del os.environ["SIMULATION"]

    # Seed the process's Params with the requested key/value pairs.
    params = Params()
    for k, v in params_config.items():
      if isinstance(v, bool):
        params.put_bool(k, v)
      else:
        params.put(k, v)

    self.environ_config = environ_config

  def _setup_vision_ipc(self, all_msgs: LogIterable, frs: dict[str, Any]):
    """Start a fake camerad VisionIpcServer with buffers sized from the frame readers."""
    assert len(self.cfg.vision_pubs) != 0

    vipc_server = VisionIpcServer("camerad")
    streams_metas = available_streams(all_msgs)
    for meta in streams_metas:
      if meta.camera_state in self.cfg.vision_pubs:
        frame_size = (frs[meta.camera_state].w, frs[meta.camera_state].h)
        vipc_server.create_buffers(meta.stream, 2, False, *frame_size)
    vipc_server.start_listener()

    self.vipc_server = vipc_server
    # Keep only vision pubs actually present in the logs.
    self.cfg.vision_pubs = [meta.camera_state for meta in streams_metas if meta.camera_state in self.cfg.vision_pubs]

  def _start_process(self):
    if self.capture is not None:
      # Wrap the launcher so the child's output is captured.
      self.process.launcher = LauncherWithCapture(self.capture, self.process.launcher)
    self.process.prepare()
    self.process.start()

  def start(
    self, params_config: dict[str, Any], environ_config: dict[str, Any],
    all_msgs: LogIterable, frs: dict[str, BaseFrameReader] | None,
    fingerprint: str | None, capture_output: bool
  ):
    """Set up env/params/sockets/vision and launch the process, blocking until it subscribes."""
    with self.prefix as p:
      self._setup_env(params_config, environ_config)

      if self.cfg.config_callback is not None:
        params = Params()
        self.cfg.config_callback(params, self.cfg, all_msgs)

      self.rc = ReplayContext(self.cfg)
      self.rc.open_context()

      self.pm = messaging.PubMaster(self.cfg.pubs)
      self.sockets = [messaging.sub_sock(s, timeout=100) for s in self.cfg.subs]

      if len(self.cfg.vision_pubs) != 0:
        assert frs is not None
        self._setup_vision_ipc(all_msgs, frs)
        assert self.vipc_server is not None

      if capture_output:
        self.capture = ProcessOutputCapture(self.cfg.proc_name, p.prefix)

      self._start_process()

      if self.cfg.init_callback is not None:
        self.cfg.init_callback(self.rc, self.pm, all_msgs, fingerprint)

      # wait for process to startup
      with Timeout(10, error_msg=f"timed out waiting for process to start: {repr(self.cfg.proc_name)}"):
        while not all(self.pm.all_readers_updated(s) for s in self.cfg.pubs if s not in self.cfg.ignore_alive_pubs):
          time.sleep(0)

  def stop(self):
    with self.prefix:
      self.process.signal(signal.SIGKILL)
      self.process.stop()
      self.rc.close_context()
      self.prefix.clean_dirs()
      self._clean_env()

  def run_step(self, msg: capnp._DynamicStructReader, frs: dict[str, BaseFrameReader] | None) -> list[capnp._DynamicStructReader]:
    """Queue msg; at end of cycle flush the queue to the process and drain its outputs."""
    assert self.rc and self.pm and self.sockets and self.process.proc

    output_msgs = []
    with self.prefix, Timeout(self.cfg.timeout, error_msg=f"timed out testing process {repr(self.cfg.proc_name)}"):
      end_of_cycle = True
      if self.cfg.should_recv_callback is not None:
        end_of_cycle = self.cfg.should_recv_callback(msg, self.cfg, self.cnt)

      self.msg_queue.append(msg)
      if end_of_cycle:
        self.rc.wait_for_recv_called()

        # call recv to let sub-sockets reconnect, after we know the process is ready
        if self.cnt == 0:
          for s in self.sockets:
            messaging.recv_one_or_none(s)

        # empty recv on drained pub indicates the end of messages, only do that if there're any
        trigger_empty_recv = False
        if self.cfg.main_pub and self.cfg.main_pub_drained:
          trigger_empty_recv = next((True for m in self.msg_queue if m.which() == self.cfg.main_pub), False)

        for m in self.msg_queue:
          self.pm.send(m.which(), m.as_builder())
          # send frames if needed
          if self.vipc_server is not None and m.which() in self.cfg.vision_pubs:
            camera_state = getattr(m, m.which())
            camera_meta = meta_from_camera_state(m.which())
            assert frs is not None
            img = frs[m.which()].get(camera_state.frameId, pix_fmt="nv12")[0]
            self.vipc_server.send(camera_meta.stream, img.flatten().tobytes(),
                                  camera_state.frameId, camera_state.timestampSof, camera_state.timestampEof)
        self.msg_queue = []

        self.rc.unlock_sockets()
        self.rc.wait_for_next_recv(trigger_empty_recv)

        # Collect everything the process published, stamping a simulated processing delay.
        for socket in self.sockets:
          ms = messaging.drain_sock(socket)
          for m in ms:
            m = m.as_builder()
            m.logMonoTime = msg.logMonoTime + int(self.cfg.processing_time * 1e9)
            output_msgs.append(m.as_reader())
        self.cnt += 1
    assert self.process.proc.is_alive()

    return output_msgs
|
||||
|
||||
|
||||
def controlsd_fingerprint_callback(rc, pm, msgs, fingerprint):
  """Feed logged CAN packets to a freshly started controlsd until it fingerprints the car.

  Completion is detected by "CarParams" appearing in Params; the fingerprint
  argument is unused here (fingerprinting happens from the CAN data itself).
  """
  print("start fingerprinting")
  params = Params()
  # Cap the CAN budget; fingerprinting is expected to finish well within 300 packets.
  canmsgs = [msg for msg in msgs if msg.which() == "can"][:300]

  # controlsd expects one arbitrary can and pandaState
  rc.send_sync(pm, "can", messaging.new_message("can", 1))
  pm.send("pandaStates", messaging.new_message("pandaStates", 1))
  rc.send_sync(pm, "can", messaging.new_message("can", 1))
  rc.wait_for_next_recv(True)

  # fingerprinting is done, when CarParams is set
  while params.get("CarParams") is None:
    if len(canmsgs) == 0:
      raise ValueError("Fingerprinting failed. Run out of can msgs")

    m = canmsgs.pop(0)
    rc.send_sync(pm, "can", m.as_builder().to_bytes())
    rc.wait_for_next_recv(False)
|
||||
|
||||
|
||||
def get_car_params_callback(rc, pm, msgs, fingerprint):
  """Resolve CarParams (from a given fingerprint or by replaying CAN) and store them in Params.

  Returns the resolved CP so callers can use it directly.
  """
  params = Params()
  if fingerprint:
    # Known platform: build CP directly from the car interface.
    CarInterface, _, _ = interfaces[fingerprint]
    CP = CarInterface.get_non_essential_params(fingerprint)
  else:
    # Unknown platform: replay buffered CAN through get_car's fingerprinting.
    can = DummySocket()
    sendcan = DummySocket()

    canmsgs = [msg for msg in msgs if msg.which() == "can"]
    has_cached_cp = params.get("CarParamsCache") is not None
    assert len(canmsgs) != 0, "CAN messages are required for fingerprinting"
    assert os.environ.get("SKIP_FW_QUERY", False) or has_cached_cp, \
      "CarParamsCache is required for fingerprinting. Make sure to keep carParams msgs in the logs."

    for m in canmsgs[:300]:
      can.send(m.as_builder().to_bytes())
    _, CP = get_car(can, sendcan, Params().get_bool("ExperimentalLongitudinalEnabled"))
  params.put("CarParams", CP.to_bytes())
  return CP
|
||||
|
||||
|
||||
def controlsd_rcv_callback(msg, cfg, frame):
  """should_recv callback for controlsd.

  Triggers a processing cycle on "can" messages whenever at least one sub
  service is due at this frame; sendcan is excluded for the first ~2000 frames
  since controlsd doesn't publish it until initialized.
  """
  # no sendcan until controlsd is initialized
  if msg.which() != "can":
    return False

  # Guard the frequency ratio with max(1, ...), consistent with
  # FrequencyBasedRcvCallback: a sub faster than "can" would truncate the
  # ratio to 0 and raise ZeroDivisionError in the modulo below.
  socks = [
    s for s in cfg.subs if
    frame % max(1, int(SERVICE_LIST[msg.which()].frequency / SERVICE_LIST[s].frequency)) == 0
  ]
  if "sendcan" in socks and (frame - 1) < 2000:
    socks.remove("sendcan")
  return len(socks) > 0
|
||||
|
||||
|
||||
def calibration_rcv_callback(msg, cfg, frame):
  # calibrationd publishes 1 calibrationData every 5 cameraOdometry packets.
  # should_recv always true to increment frame
  if frame == 1:
    return True
  return msg.which() == 'cameraOdometry'
|
||||
|
||||
|
||||
def torqued_rcv_callback(msg, cfg, frame):
  # should_recv always true to increment frame
  if frame == 1:
    return True
  return msg.which() == 'liveLocationKalman'
|
||||
|
||||
|
||||
def dmonitoringmodeld_rcv_callback(msg, cfg, frame):
  # The driver monitoring model steps once per incoming driver camera frame.
  which = msg.which()
  return which == "driverCameraState"
|
||||
|
||||
|
||||
class ModeldCameraSyncRcvCallback:
  """Fires once all camera frames needed for one modeld iteration have arrived.

  Dual-camera setups need both road and wide-road frames; single-camera
  setups trigger on the road frame alone.
  """

  def __init__(self):
    self.road_present = False
    self.wide_road_present = False
    self.is_dual_camera = True

  def __call__(self, msg, cfg, frame):
    # cfg.vision_pubs may be narrowed after startup, so re-derive every call.
    self.is_dual_camera = len(cfg.vision_pubs) == 2
    which = msg.which()
    if which == "roadCameraState":
      self.road_present = True
    elif which == "wideRoadCameraState":
      self.wide_road_present = True

    # Complete set: road frame plus (wide frame, or nothing if single-camera).
    if self.road_present and (self.wide_road_present or not self.is_dual_camera):
      self.road_present = False
      self.wide_road_present = False
      return True
    return False
|
||||
|
||||
|
||||
class MessageBasedRcvCallback:
  """should_recv callback that triggers on every message of one fixed type."""

  def __init__(self, trigger_msg_type):
    self.trigger_msg_type = trigger_msg_type

  def __call__(self, msg, cfg, frame):
    expected = self.trigger_msg_type
    return msg.which() == expected
|
||||
|
||||
|
||||
class FrequencyBasedRcvCallback:
  """Triggers on the given message type, but only on frames where a sub is due.

  A sub running at 1/N the trigger's frequency is due every N-th frame.
  """

  def __init__(self, trigger_msg_type):
    self.trigger_msg_type = trigger_msg_type

  def __call__(self, msg, cfg, frame):
    if msg.which() != self.trigger_msg_type:
      return False

    trigger_freq = SERVICE_LIST[msg.which()].frequency
    # max(1, ...) keeps subs faster than the trigger firing every frame.
    return any(
      frame % max(1, int(trigger_freq / SERVICE_LIST[s].frequency)) == 0
      for s in cfg.subs
    )
|
||||
|
||||
|
||||
def controlsd_config_callback(params, cfg, lr):
  """Seed ReplayControlsState from the logs so controlsd resumes in its logged state.

  Picks the first controlsState seen after controlsd reported itself
  initialized (no controlsInitializing event in onroadEvents).
  """
  controlsState = None
  initialized = False
  for msg in lr:
    if msg.which() == "controlsState":
      controlsState = msg.controlsState
      if initialized:
        break
    elif msg.which() == "onroadEvents":
      initialized = car.CarEvent.EventName.controlsInitializing not in [e.name for e in msg.onroadEvents]

  assert controlsState is not None and initialized, "controlsState never initialized"
  params.put("ReplayControlsState", controlsState.as_builder().to_bytes())
|
||||
|
||||
|
||||
def locationd_config_pubsub_callback(params, cfg, lr):
  """Drop the GPS pub the route didn't use: ublox routes feed gpsLocationExternal,
  non-ublox routes feed gpsLocation."""
  if params.get_bool("UbloxAvailable"):
    unused = {"gpsLocation"}
  else:
    unused = {"gpsLocationExternal"}

  cfg.pubs = set(cfg.pubs) - unused
|
||||
|
||||
|
||||
# One ProcessConfig per replayable process. pubs are fed from the logs,
# subs are the process's captured output; see ProcessConfig for field meaning.
CONFIGS = [
  ProcessConfig(
    proc_name="controlsd",
    pubs=[
      "can", "deviceState", "pandaStates", "peripheralState", "liveCalibration", "driverMonitoringState",
      "longitudinalPlan", "liveLocationKalman", "liveParameters", "radarState",
      "modelV2", "driverCameraState", "roadCameraState", "wideRoadCameraState", "managerState",
      "testJoystick", "liveTorqueParameters", "accelerometer", "gyroscope"
    ],
    subs=["controlsState", "carState", "carControl", "sendcan", "onroadEvents", "carParams"],
    ignore=["logMonoTime", "controlsState.startMonoTime", "controlsState.cumLagMs"],
    config_callback=controlsd_config_callback,
    init_callback=controlsd_fingerprint_callback,
    should_recv_callback=controlsd_rcv_callback,
    tolerance=NUMPY_TOLERANCE,
    processing_time=0.004,
    main_pub="can",
  ),
  ProcessConfig(
    proc_name="radard",
    pubs=["can", "carState", "modelV2"],
    subs=["radarState", "liveTracks"],
    ignore=["logMonoTime", "radarState.cumLagMs"],
    init_callback=get_car_params_callback,
    should_recv_callback=MessageBasedRcvCallback("can"),
    main_pub="can",
  ),
  ProcessConfig(
    proc_name="plannerd",
    pubs=["modelV2", "carControl", "carState", "controlsState", "radarState"],
    subs=["longitudinalPlan", "uiPlan"],
    ignore=["logMonoTime", "longitudinalPlan.processingDelay", "longitudinalPlan.solverExecutionTime"],
    init_callback=get_car_params_callback,
    should_recv_callback=FrequencyBasedRcvCallback("modelV2"),
    tolerance=NUMPY_TOLERANCE,
  ),
  ProcessConfig(
    proc_name="calibrationd",
    pubs=["carState", "cameraOdometry", "carParams"],
    subs=["liveCalibration"],
    ignore=["logMonoTime"],
    should_recv_callback=calibration_rcv_callback,
  ),
  ProcessConfig(
    proc_name="dmonitoringd",
    pubs=["driverStateV2", "liveCalibration", "carState", "modelV2", "controlsState"],
    subs=["driverMonitoringState"],
    ignore=["logMonoTime"],
    should_recv_callback=FrequencyBasedRcvCallback("driverStateV2"),
    tolerance=NUMPY_TOLERANCE,
  ),
  ProcessConfig(
    proc_name="locationd",
    # locationd_config_pubsub_callback removes whichever GPS pub the route didn't use.
    pubs=[
      "cameraOdometry", "accelerometer", "gyroscope", "gpsLocationExternal",
      "liveCalibration", "carState", "gpsLocation"
    ],
    subs=["liveLocationKalman"],
    ignore=["logMonoTime"],
    config_callback=locationd_config_pubsub_callback,
    tolerance=NUMPY_TOLERANCE,
  ),
  ProcessConfig(
    proc_name="paramsd",
    pubs=["liveLocationKalman", "carState"],
    subs=["liveParameters"],
    ignore=["logMonoTime"],
    init_callback=get_car_params_callback,
    should_recv_callback=FrequencyBasedRcvCallback("liveLocationKalman"),
    tolerance=NUMPY_TOLERANCE,
    processing_time=0.004,
  ),
  ProcessConfig(
    proc_name="ubloxd",
    pubs=["ubloxRaw"],
    subs=["ubloxGnss", "gpsLocationExternal"],
    ignore=["logMonoTime"],
  ),
  ProcessConfig(
    proc_name="torqued",
    pubs=["liveLocationKalman", "carState", "carControl"],
    subs=["liveTorqueParameters"],
    ignore=["logMonoTime"],
    init_callback=get_car_params_callback,
    should_recv_callback=torqued_rcv_callback,
    tolerance=NUMPY_TOLERANCE,
  ),
  ProcessConfig(
    proc_name="modeld",
    pubs=["roadCameraState", "wideRoadCameraState", "liveCalibration", "driverMonitoringState"],
    subs=["modelV2", "cameraOdometry"],
    ignore=["logMonoTime", "modelV2.frameDropPerc", "modelV2.modelExecutionTime"],
    should_recv_callback=ModeldCameraSyncRcvCallback(),
    tolerance=NUMPY_TOLERANCE,
    processing_time=0.020,
    # modeld is driven by VisionIPC frames rather than a msgq service.
    main_pub=vipc_get_endpoint_name("camerad", meta_from_camera_state("roadCameraState").stream),
    main_pub_drained=False,
    vision_pubs=["roadCameraState", "wideRoadCameraState"],
    ignore_alive_pubs=["wideRoadCameraState"],
    init_callback=get_car_params_callback,
  ),
  ProcessConfig(
    proc_name="dmonitoringmodeld",
    pubs=["liveCalibration", "driverCameraState"],
    subs=["driverStateV2"],
    ignore=["logMonoTime", "driverStateV2.modelExecutionTime", "driverStateV2.dspExecutionTime"],
    should_recv_callback=dmonitoringmodeld_rcv_callback,
    tolerance=NUMPY_TOLERANCE,
    processing_time=0.020,
    main_pub=vipc_get_endpoint_name("camerad", meta_from_camera_state("driverCameraState").stream),
    main_pub_drained=False,
    vision_pubs=["driverCameraState"],
    ignore_alive_pubs=["driverCameraState"],
  ),
]
|
||||
|
||||
|
||||
def get_process_config(name: str) -> ProcessConfig:
  """Return a deep copy of the ProcessConfig registered under name."""
  matches = (c for c in CONFIGS if c.proc_name == name)
  try:
    return copy.deepcopy(next(matches))
  except StopIteration as ex:
    raise Exception(f"Cannot find process config with name: {name}") from ex
|
||||
|
||||
|
||||
def get_custom_params_from_lr(lr: LogIterable, initial_state: str = "first") -> dict[str, Any]:
  """
  Use this to get custom params dict based on provided logs.
  Useful when replaying following processes: calibrationd, paramsd, torqued
  The params may be based on first or last message of given type (carParams, liveCalibration, liveParameters, liveTorqueParameters) in the logs.
  """

  def _msgs_of(which: str) -> list:
    return [m for m in lr if m.which() == which]

  car_params = _msgs_of("carParams")
  live_calibration = _msgs_of("liveCalibration")
  live_parameters = _msgs_of("liveParameters")
  live_torque_parameters = _msgs_of("liveTorqueParameters")

  assert initial_state in ["first", "last"]
  msg_index = 0 if initial_state == "first" else -1

  assert len(car_params) > 0, "carParams required for initial state of liveParameters and CarParamsPrevRoute"
  CP = car_params[msg_index].carParams

  custom_params = {
    "CarParamsPrevRoute": CP.as_builder().to_bytes()
  }

  if live_calibration:
    custom_params["CalibrationParams"] = live_calibration[msg_index].as_builder().to_bytes()
  if live_parameters:
    lp_dict = live_parameters[msg_index].to_dict()
    lp_dict["carFingerprint"] = CP.carFingerprint
    custom_params["LiveParameters"] = json.dumps(lp_dict)
  if live_torque_parameters:
    custom_params["LiveTorqueParameters"] = live_torque_parameters[msg_index].as_builder().to_bytes()

  return custom_params
|
||||
|
||||
|
||||
def replay_process_with_name(name: str | Iterable[str], lr: LogIterable, *args, **kwargs) -> list[capnp._DynamicStructReader]:
  """Replay a single process (str) or several (iterable of names) over the given logs."""
  if isinstance(name, str):
    names = [name]
  elif isinstance(name, Iterable):
    names = list(name)
  else:
    raise ValueError("name must be str or collections of strings")

  cfgs = [get_process_config(n) for n in names]
  return replay_process(cfgs, lr, *args, **kwargs)
|
||||
|
||||
|
||||
def replay_process(
  cfg: ProcessConfig | Iterable[ProcessConfig], lr: LogIterable, frs: dict[str, BaseFrameReader] = None,
  fingerprint: str = None, return_all_logs: bool = False, custom_params: dict[str, Any] = None,
  captured_output_store: dict[str, dict[str, str]] = None, disable_progress: bool = False
) -> list[capnp._DynamicStructReader]:
  """Replay lr through the given process config(s) and return the produced messages.

  With return_all_logs, the replayed output is merged (sorted by logMonoTime)
  with the untouched remainder of the original log.
  """
  cfgs = list(cfg) if isinstance(cfg, Iterable) else [cfg]

  # Migrate logs up front; some migrations only matter for certain pubs.
  all_msgs = migrate_all(lr, old_logtime=True,
                         manager_states=True,
                         panda_states=any("pandaStates" in c.pubs for c in cfgs),
                         camera_states=any(len(c.vision_pubs) != 0 for c in cfgs))
  process_logs = _replay_multi_process(cfgs, all_msgs, frs, fingerprint, custom_params, captured_output_store, disable_progress)

  if not return_all_logs:
    return process_logs

  replaced = {m.which() for m in process_logs}
  merged = [m for m in all_msgs if m.which() not in replaced]
  merged.extend(process_logs)
  merged.sort(key=lambda m: int(m.logMonoTime))
  return merged
|
||||
|
||||
|
||||
def _replay_multi_process(
  cfgs: list[ProcessConfig], lr: LogIterable, frs: dict[str, BaseFrameReader] | None, fingerprint: str | None,
  custom_params: dict[str, Any] | None, captured_output_store: dict[str, dict[str, str]] | None, disable_progress: bool
) -> list[capnp._DynamicStructReader]:
  """Run all configured processes over lr, routing both log messages and
  inter-process messages (one process's output feeding another's input)."""
  if fingerprint is not None:
    params_config = generate_params_config(lr=lr, fingerprint=fingerprint, custom_params=custom_params)
    env_config = generate_environ_config(fingerprint=fingerprint)
  else:
    # Derive CP from the logs when no explicit fingerprint is given.
    CP = next((m.carParams for m in lr if m.which() == "carParams"), None)
    params_config = generate_params_config(lr=lr, CP=CP, custom_params=custom_params)
    env_config = generate_environ_config(CP=CP)

  # validate frs and vision pubs
  all_vision_pubs = [pub for cfg in cfgs for pub in cfg.vision_pubs]
  if len(all_vision_pubs) != 0:
    assert frs is not None, "frs must be provided when replaying process using vision streams"
    assert all(meta_from_camera_state(st) is not None for st in all_vision_pubs), \
      f"undefined vision stream spotted, probably misconfigured process: (vision pubs: {all_vision_pubs})"
    required_vision_pubs = {m.camera_state for m in available_streams(lr)} & set(all_vision_pubs)
    assert all(st in frs for st in required_vision_pubs), f"frs for this process must contain following vision streams: {required_vision_pubs}"

  all_msgs = sorted(lr, key=lambda msg: msg.logMonoTime)
  log_msgs = []
  try:
    containers = []
    for cfg in cfgs:
      container = ProcessContainer(cfg)
      containers.append(container)
      container.start(params_config, env_config, all_msgs, frs, fingerprint, captured_output_store is not None)

    all_pubs = {pub for container in containers for pub in container.pubs}
    all_subs = {sub for container in containers for sub in container.subs}
    # Pubs not produced by any replayed process must come from the logs.
    lr_pubs = all_pubs - all_subs
    pubs_to_containers = {pub: [container for container in containers if pub in container.pubs] for pub in all_pubs}

    pub_msgs = [msg for msg in all_msgs if msg.which() in lr_pubs]
    # external queue for messages taken from logs; internal queue for messages generated by processes, which will be republished
    external_pub_queue: list[capnp._DynamicStructReader] = pub_msgs.copy()
    internal_pub_queue: list[capnp._DynamicStructReader] = []
    # heap for maintaining the order of messages generated by processes, where each element: (logMonoTime, index in internal_pub_queue)
    internal_pub_index_heap: list[tuple[int, int]] = []

    pbar = tqdm(total=len(external_pub_queue), disable=disable_progress)
    while len(external_pub_queue) != 0 or (len(internal_pub_index_heap) != 0 and not all(c.has_empty_queue for c in containers)):
      # Always dispatch the globally earliest pending message (log vs generated).
      if len(internal_pub_index_heap) == 0 or (len(external_pub_queue) != 0 and external_pub_queue[0].logMonoTime < internal_pub_index_heap[0][0]):
        msg = external_pub_queue.pop(0)
        pbar.update(1)
      else:
        _, index = heapq.heappop(internal_pub_index_heap)
        msg = internal_pub_queue[index]

      target_containers = pubs_to_containers[msg.which()]
      for container in target_containers:
        output_msgs = container.run_step(msg, frs)
        for m in output_msgs:
          # Republish outputs that another replayed process consumes.
          if m.which() in all_pubs:
            internal_pub_queue.append(m)
            heapq.heappush(internal_pub_index_heap, (m.logMonoTime, len(internal_pub_queue) - 1))
        log_msgs.extend(output_msgs)
  finally:
    # Always tear processes down, and harvest captured output if requested.
    for container in containers:
      container.stop()
      if captured_output_store is not None:
        assert container.capture is not None
        out, err = container.capture.read_outerr()
        captured_output_store[container.cfg.proc_name] = {"out": out, "err": err}

  return log_msgs
|
||||
|
||||
|
||||
def generate_params_config(lr=None, CP=None, fingerprint=None, custom_params=None) -> dict[str, Any]:
  """Build the Params key/value config a replayed process starts with.

  Base toggles are always present; log-derived (lr) and CP-derived keys are
  layered on top when available. custom_params overrides the base toggles.
  """
  params_dict: dict[str, Any] = {
    "OpenpilotEnabledToggle": True,
    "DisengageOnAccelerator": True,
    "DisableLogging": False,
  }
  params_dict.update(custom_params or {})

  if lr is not None:
    params_dict["UbloxAvailable"] = any(msg.which() == "ubloxGnss" for msg in lr)
    params_dict["IsRhdDetected"] = next((msg.driverMonitoringState.isRHD for msg in lr if msg.which() == "driverMonitoringState"), False)

  if CP is not None:
    if CP.alternativeExperience == ALTERNATIVE_EXPERIENCE.DISABLE_DISENGAGE_ON_GAS:
      params_dict["DisengageOnAccelerator"] = False
    # Cache CP for fw-based fingerprinting unless an explicit fingerprint was given.
    if fingerprint is None and CP.fingerprintSource == "fw":
      params_dict["CarParamsCache"] = CP.as_builder().to_bytes()
    if CP.openpilotLongitudinalControl:
      params_dict["ExperimentalLongitudinalEnabled"] = True
    if CP.notCar:
      params_dict["JoystickDebugMode"] = True

  return params_dict
|
||||
|
||||
|
||||
def generate_environ_config(CP=None, fingerprint=None, log_dir=None) -> dict[str, Any]:
  """Build the environment variables a replayed process should run with.

  An empty-string value means "ensure this variable is unset" (see
  ProcessContainer._setup_env).
  """
  environ_dict: dict[str, Any] = {}
  if platform.system() != "Darwin":
    environ_dict["PARAMS_ROOT"] = "/dev/shm/params"
  if log_dir is not None:
    environ_dict["LOG_ROOT"] = log_dir

  environ_dict["REPLAY"] = "1"

  # Regen or python process
  if fingerprint is not None:
    skip_fw, fp = "1", fingerprint
  elif CP is not None and CP.fingerprintSource != "fw":
    skip_fw, fp = "1", CP.carFingerprint
  else:
    # fw-sourced CP or nothing known: let the FW query path run.
    skip_fw, fp = "", ""
  environ_dict["SKIP_FW_QUERY"] = skip_fw
  environ_dict["FINGERPRINT"] = fp

  return environ_dict
|
||||
|
||||
|
||||
def check_openpilot_enabled(msgs: LogIterable) -> bool:
  """Return True if openpilot was continuously active for more than 10s of
  control frames in msgs. notCar platforms always pass."""
  streak = 0
  longest = 0
  for msg in msgs:
    which = msg.which()
    if which == "carParams" and msg.carParams.notCar:
      # Non-car platforms don't engage like cars do; treat as enabled.
      return True
    if which == "controlsState":
      streak = streak + 1 if msg.controlsState.active else 0
      longest = max(longest, streak)

  return longest > int(10. / DT_CTRL)
|
||||
1
selfdrive/test/process_replay/ref_commit
Normal file
1
selfdrive/test/process_replay/ref_commit
Normal file
@@ -0,0 +1 @@
|
||||
43efe1cf08cba8c86bc1ae8234b3d3d084a40e5d
|
||||
158
selfdrive/test/process_replay/regen.py
Normal file
158
selfdrive/test/process_replay/regen.py
Normal file
@@ -0,0 +1,158 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import argparse
|
||||
import time
|
||||
import capnp
|
||||
import numpy as np
|
||||
|
||||
from typing import Any
|
||||
from collections.abc import Iterable
|
||||
|
||||
from openpilot.selfdrive.test.process_replay.process_replay import CONFIGS, FAKEDATA, ProcessConfig, replay_process, get_process_config, \
|
||||
check_openpilot_enabled, get_custom_params_from_lr
|
||||
from openpilot.selfdrive.test.process_replay.vision_meta import DRIVER_FRAME_SIZES
|
||||
from openpilot.selfdrive.test.update_ci_routes import upload_route
|
||||
from openpilot.tools.lib.route import Route
|
||||
from openpilot.tools.lib.framereader import FrameReader, BaseFrameReader, FrameType
|
||||
from openpilot.tools.lib.logreader import LogReader, LogIterable
|
||||
from openpilot.tools.lib.helpers import save_log
|
||||
|
||||
|
||||
class DummyFrameReader(BaseFrameReader):
  """Frame reader that synthesizes solid-color frames instead of decoding video."""

  def __init__(self, w: int, h: int, frame_count: int, pix_val: int):
    self.pix_val = pix_val          # constant value every pixel is filled with
    self.w, self.h = w, h
    self.frame_count = frame_count
    self.frame_type = FrameType.raw

  def get(self, idx, count=1, pix_fmt="yuv420p"):
    """Return `count` identical synthetic frames in the requested pixel format."""
    if pix_fmt == "rgb24":
      dims = (self.h, self.w, 3)
    elif pix_fmt in ("nv12", "yuv420p"):
      # planar YUV 4:2:0: full-res Y plane plus quarter-res U and V planes
      dims = (int((self.h * self.w) * 3 / 2),)
    else:
      raise NotImplementedError

    template = np.full(dims, self.pix_val, dtype=np.uint8)
    return [template.copy() for _ in range(count)]

  @staticmethod
  def zero_dcamera():
    """Black driver-camera stream sized for a tici device."""
    return DummyFrameReader(*DRIVER_FRAME_SIZES["tici"], 1200, 0)
|
||||
|
||||
|
||||
def regen_segment(
  lr: LogIterable, frs: dict[str, Any] | None = None,
  processes: Iterable[ProcessConfig] = CONFIGS, disable_tqdm: bool = False
) -> list[capnp._DynamicStructReader]:
  """Replay the given processes over a segment's logs and return all output messages.

  Args:
    lr: iterable of log messages for the segment.
    frs: optional mapping of camera state name -> frame reader (None when no
         process needs vision input).
    processes: process configs to replay (defaults to every known process).
    disable_tqdm: suppress per-process progress bars.
  """
  # replay expects messages in monotonically increasing log-time order
  all_msgs = sorted(lr, key=lambda m: m.logMonoTime)
  custom_params = get_custom_params_from_lr(all_msgs)

  print("Replayed processes:", [p.proc_name for p in processes])
  print("\n\n", "*"*30, "\n\n", sep="")

  return replay_process(processes, all_msgs, frs, return_all_logs=True, custom_params=custom_params, disable_progress=disable_tqdm)
|
||||
|
||||
|
||||
def setup_data_readers(
  route: str, sidx: int, use_route_meta: bool,
  needs_driver_cam: bool = True, needs_road_cam: bool = True, dummy_driver_cam: bool = False
) -> tuple[LogReader, dict[str, Any]]:
  """Open the log reader and camera frame readers for one segment of a route.

  When use_route_meta is True, file paths come from the Route metadata API;
  otherwise files are fetched by the "cd:/" convention from the CI data bucket.
  Only the cameras actually requested (needs_*) are opened; dummy_driver_cam
  substitutes a blank synthetic driver camera stream.

  Returns:
    (lr, frs): the LogReader and a dict mapping camera state name -> frame reader.
  """
  if use_route_meta:
    r = Route(route)
    lr = LogReader(r.log_paths()[sidx])
    frs = {}
    # camera paths may be shorter than the segment index or contain None entries
    if needs_road_cam and len(r.camera_paths()) > sidx and r.camera_paths()[sidx] is not None:
      frs['roadCameraState'] = FrameReader(r.camera_paths()[sidx])
    if needs_road_cam and len(r.ecamera_paths()) > sidx and r.ecamera_paths()[sidx] is not None:
      frs['wideRoadCameraState'] = FrameReader(r.ecamera_paths()[sidx])
    if needs_driver_cam:
      if dummy_driver_cam:
        frs['driverCameraState'] = DummyFrameReader.zero_dcamera()
      elif len(r.dcamera_paths()) > sidx and r.dcamera_paths()[sidx] is not None:
        # neo (EON) driver camera footage cannot be replayed; require the dummy stream
        device_type = next(str(msg.initData.deviceType) for msg in lr if msg.which() == "initData")
        assert device_type != "neo", "Driver camera not supported on neo segments. Use dummy dcamera."
        frs['driverCameraState'] = FrameReader(r.dcamera_paths()[sidx])
  else:
    lr = LogReader(f"cd:/{route.replace('|', '/')}/{sidx}/rlog.bz2")
    frs = {}
    if needs_road_cam:
      frs['roadCameraState'] = FrameReader(f"cd:/{route.replace('|', '/')}/{sidx}/fcamera.hevc")
      # only fetch the wide camera if the log actually contains its state messages
      if next((True for m in lr if m.which() == "wideRoadCameraState"), False):
        frs['wideRoadCameraState'] = FrameReader(f"cd:/{route.replace('|', '/')}/{sidx}/ecamera.hevc")
    if needs_driver_cam:
      if dummy_driver_cam:
        frs['driverCameraState'] = DummyFrameReader.zero_dcamera()
      else:
        # neo (EON) driver camera footage cannot be replayed; require the dummy stream
        device_type = next(str(msg.initData.deviceType) for msg in lr if msg.which() == "initData")
        assert device_type != "neo", "Driver camera not supported on neo segments. Use dummy dcamera."
        frs['driverCameraState'] = FrameReader(f"cd:/{route.replace('|', '/')}/{sidx}/dcamera.hevc")

  return lr, frs
|
||||
|
||||
|
||||
def regen_and_save(
  route: str, sidx: int, processes: str | Iterable[str] = "all", outdir: str = FAKEDATA,
  upload: bool = False, use_route_meta: bool = False, disable_tqdm: bool = False, dummy_driver_cam: bool = False
) -> str:
  """Regenerate one segment by replaying processes over it and save the new rlog.

  Args:
    route: source route name.
    sidx: segment index within the route.
    processes: "all" or an iterable of process names to replay.
    outdir: directory the regenerated segment directory is created under.
    upload: upload the regenerated segment to the CI bucket.
    use_route_meta: resolve file paths through Route metadata instead of CI URLs.
    disable_tqdm: suppress progress bars.
    dummy_driver_cam: replace the driver camera with a blank synthetic stream.

  Returns:
    Relative path of the new segment's log directory.

  Raises:
    ValueError: if `processes` is neither "all" nor an iterable of names.
    Exception: if the regenerated route did not engage for long enough.
  """
  if not isinstance(processes, str) and not hasattr(processes, "__iter__"):
    raise ValueError("processes must be 'all' or an iterable of process names")

  if processes != "all":
    # any other bare string is ambiguous — only "all" is accepted as a string
    if isinstance(processes, str):
      raise ValueError(f"Invalid value for processes: {processes}")
    replayed_processes = [get_process_config(d) for d in processes]
  else:
    replayed_processes = CONFIGS

  # only open the cameras that some replayed process actually subscribes to
  all_vision_pubs = {pub for cfg in replayed_processes for pub in cfg.vision_pubs}
  lr, frs = setup_data_readers(route, sidx, use_route_meta,
                               needs_driver_cam="driverCameraState" in all_vision_pubs,
                               needs_road_cam="roadCameraState" in all_vision_pubs or "wideRoadCameraState" in all_vision_pubs,
                               dummy_driver_cam=dummy_driver_cam)
  output_logs = regen_segment(lr, frs, replayed_processes, disable_tqdm=disable_tqdm)

  # timestamped directory name mimics a real device segment ("--0" suffix)
  log_dir = os.path.join(outdir, time.strftime("%Y-%m-%d--%H-%M-%S--0", time.gmtime()))
  rel_log_dir = os.path.relpath(log_dir)
  rpath = os.path.join(log_dir, "rlog.bz2")

  os.makedirs(log_dir)
  save_log(rpath, output_logs, compress=True)

  print("\n\n", "*"*30, "\n\n", sep="")
  print("New route:", rel_log_dir, "\n")

  # a regenerated segment is only useful as a reference if openpilot engaged
  if not check_openpilot_enabled(output_logs):
    raise Exception("Route did not engage for long enough")

  if upload:
    upload_route(rel_log_dir)

  return rel_log_dir
|
||||
|
||||
|
||||
if __name__ == "__main__":
  def comma_separated_list(string):
    # argparse `type=` hook: "controlsd,radard" -> ["controlsd", "radard"]
    return string.split(",")

  all_procs = [p.proc_name for p in CONFIGS]
  parser = argparse.ArgumentParser(description="Generate new segments from old ones")
  parser.add_argument("--upload", action="store_true", help="Upload the new segment to the CI bucket")
  parser.add_argument("--outdir", help="log output dir", default=FAKEDATA)
  parser.add_argument("--dummy-dcamera", action='store_true', help="Use dummy blank driver camera")
  parser.add_argument("--whitelist-procs", type=comma_separated_list, default=all_procs,
                      help="Comma-separated whitelist of processes to regen (e.g. controlsd,radard)")
  parser.add_argument("--blacklist-procs", type=comma_separated_list, default=[],
                      help="Comma-separated blacklist of processes to regen (e.g. controlsd,radard)")
  parser.add_argument("route", type=str, help="The source route")
  parser.add_argument("seg", type=int, help="Segment in source route")
  args = parser.parse_args()

  # blacklist wins over whitelist
  blacklist_set = set(args.blacklist_procs)
  processes = [p for p in args.whitelist_procs if p not in blacklist_set]
  regen_and_save(args.route, args.seg, processes=processes, upload=args.upload, outdir=args.outdir, dummy_driver_cam=args.dummy_dcamera)
|
||||
54
selfdrive/test/process_replay/regen_all.py
Normal file
54
selfdrive/test/process_replay/regen_all.py
Normal file
@@ -0,0 +1,54 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import concurrent.futures
|
||||
import os
|
||||
import random
|
||||
import traceback
|
||||
from tqdm import tqdm
|
||||
|
||||
from openpilot.common.prefix import OpenpilotPrefix
|
||||
from openpilot.selfdrive.test.process_replay.regen import regen_and_save
|
||||
from openpilot.selfdrive.test.process_replay.test_processes import FAKEDATA, source_segments as segments
|
||||
from openpilot.tools.lib.route import SegmentName
|
||||
|
||||
|
||||
def regen_job(segment, upload, disable_tqdm):
  """Regenerate a single segment inside an isolated openpilot prefix.

  Returns a ready-to-paste test_processes.py entry string on success, or a
  formatted error string (message plus traceback) on failure, so the parent
  process pool can collect results without any worker raising.
  """
  with OpenpilotPrefix():
    car_name, source_segment = segment
    seg_name = SegmentName(source_segment)
    # a random dongle id keeps parallel regens from clobbering each other's outdir
    dongle = 'regen' + ''.join(random.choice('0123456789ABCDEF') for _ in range(11))
    try:
      out_path = regen_and_save(seg_name.route_name.canonical_name, seg_name.segment_num, upload=upload, use_route_meta=False,
                                outdir=os.path.join(FAKEDATA, dongle), disable_tqdm=disable_tqdm, dummy_driver_cam=True)
      # convert ".../<dongle>/<segment-dir>" into the canonical "<dongle>|<segment-dir>" name
      new_seg = '|'.join(out_path.split('/')[-2:])
      return f' ("{car_name}", "{new_seg}"), '
    except Exception as e:
      return f" {segment} failed: {str(e)}" + traceback.format_exc() + "\n\n"
|
||||
|
||||
|
||||
if __name__ == "__main__":
  all_cars = {car for car, _ in segments}

  parser = argparse.ArgumentParser(description="Generate new segments from old ones")
  parser.add_argument("-j", "--jobs", type=int, default=1)
  parser.add_argument("--no-upload", action="store_true")
  parser.add_argument("--whitelist-cars", type=str, nargs="*", default=all_cars,
                      help="Whitelist given cars from the test (e.g. HONDA)")
  parser.add_argument("--blacklist-cars", type=str, nargs="*", default=[],
                      help="Blacklist given cars from the test (e.g. HONDA)")
  args = parser.parse_args()

  # blacklist wins over whitelist; car names are matched case-insensitively
  tested_cars = set(args.whitelist_cars) - set(args.blacklist_cars)
  tested_cars = {c.upper() for c in tested_cars}
  tested_segments = [(car, segment) for car, segment in segments if car in tested_cars]

  with concurrent.futures.ProcessPoolExecutor(max_workers=args.jobs) as pool:
    # per-worker tqdm is disabled when running more than one job in parallel
    p = pool.map(regen_job, tested_segments, [not args.no_upload] * len(tested_segments), [args.jobs > 1] * len(tested_segments))
    msg = "Copy these new segments into test_processes.py:"
    for seg in tqdm(p, desc="Generating segments", total=len(tested_segments)):
      msg += "\n" + str(seg)
    print()
    print()
    print(msg)
|
||||
196
selfdrive/test/process_replay/test_debayer.py
Normal file
196
selfdrive/test/process_replay/test_debayer.py
Normal file
@@ -0,0 +1,196 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
import bz2
|
||||
import numpy as np
|
||||
|
||||
import pyopencl as cl # install with `PYOPENCL_CL_PRETEND_VERSION=2.0 pip install pyopencl`
|
||||
|
||||
from openpilot.system.hardware import PC, TICI
|
||||
from openpilot.common.basedir import BASEDIR
|
||||
from openpilot.tools.lib.openpilotci import BASE_URL
|
||||
from openpilot.system.version import get_commit
|
||||
from openpilot.system.camerad.snapshot.snapshot import yuv_to_rgb
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
from openpilot.tools.lib.filereader import FileReader
|
||||
|
||||
TEST_ROUTE = "8345e3b82948d454|2022-05-04--13-45-33/0"
|
||||
|
||||
FRAME_WIDTH = 1928
|
||||
FRAME_HEIGHT = 1208
|
||||
FRAME_STRIDE = 2896
|
||||
|
||||
UV_WIDTH = FRAME_WIDTH // 2
|
||||
UV_HEIGHT = FRAME_HEIGHT // 2
|
||||
UV_SIZE = UV_WIDTH * UV_HEIGHT
|
||||
|
||||
|
||||
def get_frame_fn(ref_commit, test_route, tici=True):
  """Build the reference-frame blob filename for a (route, commit) pair."""
  device_suffix = "_tici" if tici else ""
  return f"{test_route}_debayer{device_suffix}_{ref_commit}.bz2"
|
||||
|
||||
|
||||
def bzip_frames(frames):
  """Serialize (y, u, v) plane tuples into a single bz2-compressed byte blob.

  Planes are concatenated in y, u, v order per frame. Using join instead of
  repeated bytes += avoids quadratic reallocation on long frame lists.
  """
  chunks = []
  for y, u, v in frames:
    chunks.append(y.tobytes())
    chunks.append(u.tobytes())
    chunks.append(v.tobytes())
  return bz2.compress(b''.join(chunks))
|
||||
|
||||
|
||||
def unbzip_frames(url):
  """Download and decompress a reference-frame blob into (y, u, v) plane tuples."""
  with FileReader(url) as f:
    compressed = f.read()

  raw = bz2.decompress(compressed)

  y_len = FRAME_WIDTH * FRAME_HEIGHT
  frame_len = y_len + UV_SIZE * 2   # bytes per full YUV 4:2:0 frame
  frames = []
  for start in range(0, len(raw), frame_len):
    u_off = start + y_len
    v_off = u_off + UV_SIZE

    y_plane = np.frombuffer(raw[start: u_off], dtype=np.uint8).reshape((FRAME_HEIGHT, FRAME_WIDTH))
    u_plane = np.frombuffer(raw[u_off: v_off], dtype=np.uint8).reshape((UV_HEIGHT, UV_WIDTH))
    v_plane = np.frombuffer(raw[v_off: v_off + UV_SIZE], dtype=np.uint8).reshape((UV_HEIGHT, UV_WIDTH))

    frames.append((y_plane, u_plane, v_plane))

  return frames
|
||||
|
||||
|
||||
def init_kernels(frame_offset=0):
  """Create an OpenCL context and build the camerad debayer kernel.

  Returns (ctx, debayer_prg). Build defines mirror the values camerad uses on
  device so the replayed output is bit-comparable.
  """
  ctx = cl.create_some_context(interactive=False)

  with open(os.path.join(BASEDIR, 'system/camerad/cameras/real_debayer.cl')) as f:
    build_args = ' -cl-fast-relaxed-math -cl-denorms-are-zero -cl-single-precision-constant' + \
      f' -DFRAME_STRIDE={FRAME_STRIDE} -DRGB_WIDTH={FRAME_WIDTH} -DRGB_HEIGHT={FRAME_HEIGHT} -DFRAME_OFFSET={frame_offset} -DCAM_NUM=0'
    if PC:
      # PC OpenCL implementations may lack half-precision support; emulate with float
      build_args += ' -DHALF_AS_FLOAT=1 -cl-std=CL2.0'
    debayer_prg = cl.Program(ctx, f.read()).build(options=build_args)

  return ctx, debayer_prg
|
||||
|
||||
def debayer_frame(ctx, debayer_prg, data, rgb=False):
  """Run the debayer kernel on one raw camera frame.

  Args:
    ctx: OpenCL context from init_kernels().
    debayer_prg: built debayer program from init_kernels().
    data: raw bayer frame bytes as a uint8 numpy array.
    rgb: if True convert the result to RGB, otherwise return (y, u, v) planes.
  """
  q = cl.CommandQueue(ctx)

  # output buffer holds the full planar YUV 4:2:0 frame
  yuv_buff = np.empty(FRAME_WIDTH * FRAME_HEIGHT + UV_SIZE * 2, dtype=np.uint8)

  cam_g = cl.Buffer(ctx, cl.mem_flags.READ_ONLY | cl.mem_flags.COPY_HOST_PTR, hostbuf=data)
  yuv_g = cl.Buffer(ctx, cl.mem_flags.WRITE_ONLY, FRAME_WIDTH * FRAME_HEIGHT + UV_SIZE * 2)

  # work-group size matches what camerad uses on device vs PC
  local_worksize = (20, 20) if TICI else (4, 4)
  ev1 = debayer_prg.debayer10(q, (UV_WIDTH, UV_HEIGHT), local_worksize, cam_g, yuv_g)
  cl.enqueue_copy(q, yuv_buff, yuv_g, wait_for=[ev1]).wait()
  cl.enqueue_barrier(q)

  # split the flat buffer into the three planes
  y = yuv_buff[:FRAME_WIDTH*FRAME_HEIGHT].reshape((FRAME_HEIGHT, FRAME_WIDTH))
  u = yuv_buff[FRAME_WIDTH*FRAME_HEIGHT:FRAME_WIDTH*FRAME_HEIGHT+UV_SIZE].reshape((UV_HEIGHT, UV_WIDTH))
  v = yuv_buff[FRAME_WIDTH*FRAME_HEIGHT+UV_SIZE:].reshape((UV_HEIGHT, UV_WIDTH))

  if rgb:
    return yuv_to_rgb(y, u, v)
  else:
    return y, u, v
|
||||
|
||||
|
||||
def debayer_replay(lr):
  """Debayer every road-camera frame in the log, returning (y, u, v) tuples."""
  ctx, prg = init_kernels()

  out = []
  for msg in lr:
    if msg.which() != 'roadCameraState':
      continue
    cam = msg.roadCameraState
    # some messages carry no image payload; skip those
    if cam.image:
      raw = np.frombuffer(cam.image, dtype=np.uint8)
      out.append(debayer_frame(ctx, prg, raw))

  return out
|
||||
|
||||
|
||||
if __name__ == "__main__":
  # with --update, skip comparison and push the current output as the new reference
  update = "--update" in sys.argv
  replay_dir = os.path.dirname(os.path.abspath(__file__))
  ref_commit_fn = os.path.join(replay_dir, "debayer_replay_ref_commit")

  # load logs
  lr = list(LogReader(TEST_ROUTE))

  # run replay
  frames = debayer_replay(lr)

  # compare against the reference frames for the pinned ref commit
  failed = False
  diff = ''
  yuv_i = ['y', 'u', 'v']
  if not update:
    with open(ref_commit_fn) as f:
      ref_commit = f.read().strip()
    frame_fn = get_frame_fn(ref_commit, TEST_ROUTE, tici=TICI)

    try:
      cmp_frames = unbzip_frames(BASE_URL + frame_fn)

      if len(frames) != len(cmp_frames):
        failed = True
        diff += 'amount of frames not equal\n'

      for i, (frame, cmp_frame) in enumerate(zip(frames, cmp_frames, strict=True)):
        # j indexes the y, u, v planes of each frame
        for j in range(3):
          fr = frame[j]
          cmp_f = cmp_frame[j]
          if fr.shape != cmp_f.shape:
            failed = True
            diff += f'frame shapes not equal for ({i}, {yuv_i[j]})\n'
            diff += f'{ref_commit}: {cmp_f.shape}\n'
            diff += f'HEAD: {fr.shape}\n'
          elif not np.array_equal(fr, cmp_f):
            failed = True
            # distinguish off-by-one pixel noise from real divergence
            if np.allclose(fr, cmp_f, atol=1):
              diff += f'frames not equal for ({i}, {yuv_i[j]}), but are all close\n'
            else:
              diff += f'frames not equal for ({i}, {yuv_i[j]})\n'

            frame_diff = np.abs(np.subtract(fr, cmp_f))
            diff_len = len(np.nonzero(frame_diff)[0])
            if diff_len > 10000:
              diff += f'different at a large amount of pixels ({diff_len})\n'
            else:
              diff += 'different at (frame, yuv, pixel, ref, HEAD):\n'
              for k in zip(*np.nonzero(frame_diff), strict=True):
                diff += f'{i}, {yuv_i[j]}, {k}, {cmp_f[k]}, {fr[k]}\n'

      if failed:
        print(diff)
        with open("debayer_diff.txt", "w") as f:
          f.write(diff)
    except Exception as e:
      # failure to fetch/parse the reference counts as a test failure
      print(str(e))
      failed = True

  # upload new refs (also on device failure, so the diff can be inspected)
  if update or (failed and TICI):
    from openpilot.tools.lib.openpilotci import upload_file

    print("Uploading new refs")

    frames_bzip = bzip_frames(frames)

    new_commit = get_commit()
    frame_fn = os.path.join(replay_dir, get_frame_fn(new_commit, TEST_ROUTE, tici=TICI))
    with open(frame_fn, "wb") as f2:
      f2.write(frames_bzip)

    try:
      upload_file(frame_fn, os.path.basename(frame_fn))
    except Exception as e:
      print("failed to upload", e)

  # only --update moves the pinned reference commit forward
  if update:
    with open(ref_commit_fn, 'w') as f:
      f.write(str(new_commit))

    print("\nNew ref commit: ", new_commit)

  sys.exit(int(failed))
|
||||
33
selfdrive/test/process_replay/test_fuzzy.py
Normal file
33
selfdrive/test/process_replay/test_fuzzy.py
Normal file
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/env python3
|
||||
import copy
|
||||
from hypothesis import given, HealthCheck, Phase, settings
|
||||
import hypothesis.strategies as st
|
||||
from parameterized import parameterized
|
||||
import unittest
|
||||
|
||||
from cereal import log
|
||||
from openpilot.selfdrive.car.toyota.values import CAR as TOYOTA
|
||||
from openpilot.selfdrive.test.fuzzy_generation import FuzzyGenerator
|
||||
import openpilot.selfdrive.test.process_replay.process_replay as pr
|
||||
|
||||
# These processes currently fail because of unrealistic data breaking assumptions
# that openpilot makes causing error with NaN, inf, int size, array indexing ...
# TODO: Make each one testable
NOT_TESTED = ['controlsd', 'plannerd', 'calibrationd', 'dmonitoringd', 'paramsd', 'dmonitoringmodeld', 'modeld']

# (proc_name, config) pairs; configs are deep-copied so tests can mutate them safely
TEST_CASES = [(cfg.proc_name, copy.deepcopy(cfg)) for cfg in pr.CONFIGS if cfg.proc_name not in NOT_TESTED]

class TestFuzzProcesses(unittest.TestCase):
  """Feed each replayable process randomly generated input messages and check it survives."""

  # TODO: make this faster and increase examples
  @parameterized.expand(TEST_CASES)
  @given(st.data())
  @settings(phases=[Phase.generate, Phase.target], max_examples=10, deadline=1000, suppress_health_check=[HealthCheck.too_slow, HealthCheck.data_too_large])
  def test_fuzz_process(self, proc_name, cfg, data):
    # generate random-but-schema-valid messages for every topic the process subscribes to
    msgs = FuzzyGenerator.get_random_event_msg(data.draw, events=cfg.pubs, real_floats=True)
    lr = [log.Event.new_message(**m).as_reader() for m in msgs]
    # short timeout: the pass criterion is simply "does not hang or crash"
    cfg.timeout = 5
    pr.replay_process(cfg, lr, fingerprint=TOYOTA.COROLLA_TSS2, disable_progress=True)
|
||||
|
||||
if __name__ == "__main__":
  # run the fuzz tests when executed as a script
  unittest.main()
|
||||
231
selfdrive/test/process_replay/test_processes.py
Normal file
231
selfdrive/test/process_replay/test_processes.py
Normal file
@@ -0,0 +1,231 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import concurrent.futures
|
||||
import os
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from tqdm import tqdm
|
||||
from typing import Any
|
||||
|
||||
from openpilot.selfdrive.car.car_helpers import interface_names
|
||||
from openpilot.tools.lib.openpilotci import get_url, upload_file
|
||||
from openpilot.selfdrive.test.process_replay.compare_logs import compare_logs, format_diff
|
||||
from openpilot.selfdrive.test.process_replay.process_replay import CONFIGS, PROC_REPLAY_DIR, FAKEDATA, check_openpilot_enabled, replay_process
|
||||
from openpilot.system.version import get_commit
|
||||
from openpilot.tools.lib.filereader import FileReader
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
from openpilot.tools.lib.helpers import save_log
|
||||
|
||||
# Original on-device segments used as input for regeneration (one per make/platform).
source_segments = [
  ("BODY", "937ccb7243511b65|2022-05-24--16-03-09--1"),  # COMMA.BODY
  ("HYUNDAI", "02c45f73a2e5c6e9|2021-01-01--19-08-22--1"),  # HYUNDAI.SONATA
  ("HYUNDAI2", "d545129f3ca90f28|2022-11-07--20-43-08--3"),  # HYUNDAI.KIA_EV6 (+ QCOM GPS)
  ("TOYOTA", "0982d79ebb0de295|2021-01-04--17-13-21--13"),  # TOYOTA.PRIUS
  ("TOYOTA2", "0982d79ebb0de295|2021-01-03--20-03-36--6"),  # TOYOTA.RAV4
  ("TOYOTA3", "f7d7e3538cda1a2a|2021-08-16--08-55-34--6"),  # TOYOTA.COROLLA_TSS2
  ("HONDA", "eb140f119469d9ab|2021-06-12--10-46-24--27"),  # HONDA.CIVIC (NIDEC)
  ("HONDA2", "7d2244f34d1bbcda|2021-06-25--12-25-37--26"),  # HONDA.ACCORD (BOSCH)
  ("CHRYSLER", "4deb27de11bee626|2021-02-20--11-28-55--8"),  # CHRYSLER.PACIFICA_2018_HYBRID
  ("RAM", "17fc16d840fe9d21|2023-04-26--13-28-44--5"),  # CHRYSLER.RAM_1500
  ("SUBARU", "341dccd5359e3c97|2022-09-12--10-35-33--3"),  # SUBARU.OUTBACK
  ("GM", "0c58b6a25109da2b|2021-02-23--16-35-50--11"),  # GM.VOLT
  ("GM2", "376bf99325883932|2022-10-27--13-41-22--1"),  # GM.BOLT_EUV
  ("NISSAN", "35336926920f3571|2021-02-12--18-38-48--46"),  # NISSAN.XTRAIL
  ("VOLKSWAGEN", "de9592456ad7d144|2021-06-29--11-00-15--6"),  # VOLKSWAGEN.GOLF
  ("MAZDA", "bd6a637565e91581|2021-10-30--15-14-53--4"),  # MAZDA.CX9_2021
  ("FORD", "54827bf84c38b14f|2023-01-26--21-59-07--4"),  # FORD.BRONCO_SPORT_MK1

  # Enable when port is tested and dashcamOnly is no longer set
  #("TESLA", "bb50caf5f0945ab1|2021-06-19--17-20-18--3"),  # TESLA.AP2_MODELS
  #("VOLKSWAGEN2", "3cfdec54aa035f3f|2022-07-19--23-45-10--2"),  # VOLKSWAGEN.PASSAT_NMS
]

# Regenerated reference segments (produced by regen_all.py) that the test replays against.
segments = [
  ("BODY", "regen997DF2697CB|2023-10-30--23-14-29--0"),
  ("HYUNDAI", "regen2A9D2A8E0B4|2023-10-30--23-13-34--0"),
  ("HYUNDAI2", "regen6CA24BC3035|2023-10-30--23-14-28--0"),
  ("TOYOTA", "regen5C019D76307|2023-10-30--23-13-31--0"),
  ("TOYOTA2", "regen5DCADA88A96|2023-10-30--23-14-57--0"),
  ("TOYOTA3", "regen7204CA3A498|2023-10-30--23-15-55--0"),
  ("HONDA", "regen048F8FA0B24|2023-10-30--23-15-53--0"),
  ("HONDA2", "regen7D2D3F82D5B|2023-10-30--23-15-55--0"),
  ("CHRYSLER", "regen7125C42780C|2023-10-30--23-16-21--0"),
  ("RAM", "regen2731F3213D2|2023-10-30--23-18-11--0"),
  ("SUBARU", "regen86E4C1B4DDD|2023-10-30--23-18-14--0"),
  ("GM", "regenF6393D64745|2023-10-30--23-17-18--0"),
  ("GM2", "regen220F830C05B|2023-10-30--23-18-39--0"),
  ("NISSAN", "regen4F671F7C435|2023-10-30--23-18-40--0"),
  ("VOLKSWAGEN", "regen8BDFE7307A0|2023-10-30--23-19-36--0"),
  ("MAZDA", "regen2E9F1A15FD5|2023-10-30--23-20-36--0"),
  ("FORD", "regen6D39E54606E|2023-10-30--23-20-54--0"),
]

# dashcamOnly makes don't need to be tested until a full port is done
excluded_interfaces = ["mock", "tesla"]

BASE_URL = "https://commadataci.blob.core.windows.net/openpilotci/"
REF_COMMIT_FN = os.path.join(PROC_REPLAY_DIR, "ref_commit")
# model processes are nondeterministic across machines and are tested elsewhere
EXCLUDED_PROCS = {"modeld", "dmonitoringmodeld"}
|
||||
|
||||
|
||||
def run_test_process(data):
  """Pool worker: replay one (segment, process) pair and optionally upload the log.

  Returns (segment, proc_name, result) where result is None in upload-only mode,
  None/empty diff on success, or a diff/error description on failure.
  """
  segment, cfg, args, cur_log_fn, ref_log_path, lr_dat = data

  res = None
  if not args.upload_only:
    lr = LogReader.from_bytes(lr_dat)
    res, log_msgs = test_process(cfg, lr, segment, ref_log_path, cur_log_fn, args.ignore_fields, args.ignore_msgs)
    # persist the new log so it can be uploaded when refs are updated
    save_log(cur_log_fn, log_msgs)

  if args.update_refs or args.upload_only:
    log_basename = os.path.basename(cur_log_fn)
    print(f'Uploading: {log_basename}')
    assert os.path.exists(cur_log_fn), f"Cannot find log to upload: {cur_log_fn}"
    upload_file(cur_log_fn, log_basename)
    os.remove(cur_log_fn)

  return (segment, cfg.proc_name, res)
|
||||
|
||||
|
||||
def get_log_data(segment):
  """Download the raw rlog bytes for a segment, returning (segment, bytes)."""
  route, seg_num = segment.rsplit("--", 1)
  with FileReader(get_url(route, seg_num)) as f:
    return (segment, f.read())
|
||||
|
||||
|
||||
def test_process(cfg, lr, segment, ref_log_path, new_log_path, ignore_fields=None, ignore_msgs=None):
  """Replay one process over a segment and diff its output against the reference log.

  Returns (result, log_msgs) where result is None/empty on a clean match, or a
  diff / error string on mismatch, and log_msgs are the newly produced messages.
  """
  if ignore_fields is None:
    ignore_fields = []
  if ignore_msgs is None:
    ignore_msgs = []

  ref_log_msgs = list(LogReader(ref_log_path))

  try:
    log_msgs = replay_process(cfg, lr, disable_progress=True)
  except Exception as e:
    # attach the segment name so parallel failures are attributable
    raise Exception("failed on segment: " + segment) from e

  # check to make sure openpilot is engaged in the route
  if cfg.proc_name == "controlsd":
    if not check_openpilot_enabled(log_msgs):
      # FIXME: these segments should work, but the replay enabling logic is too brittle
      if segment not in ("regen6CA24BC3035|2023-10-30--23-14-28--0", "regen7D2D3F82D5B|2023-10-30--23-15-55--0"):
        return f"Route did not enable at all or for long enough: {new_log_path}", log_msgs

  try:
    return compare_logs(ref_log_msgs, log_msgs, ignore_fields + cfg.ignore, ignore_msgs, cfg.tolerance), log_msgs
  except Exception as e:
    # comparison errors are reported as a failure string, not raised
    return str(e), log_msgs
|
||||
|
||||
|
||||
if __name__ == "__main__":
  all_cars = {car for car, _ in segments}
  all_procs = {cfg.proc_name for cfg in CONFIGS if cfg.proc_name not in EXCLUDED_PROCS}

  cpu_count = os.cpu_count() or 1

  parser = argparse.ArgumentParser(description="Regression test to identify changes in a process's output")
  parser.add_argument("--whitelist-procs", type=str, nargs="*", default=all_procs,
                      help="Whitelist given processes from the test (e.g. controlsd)")
  parser.add_argument("--whitelist-cars", type=str, nargs="*", default=all_cars,
                      help="Whitelist given cars from the test (e.g. HONDA)")
  parser.add_argument("--blacklist-procs", type=str, nargs="*", default=[],
                      help="Blacklist given processes from the test (e.g. controlsd)")
  parser.add_argument("--blacklist-cars", type=str, nargs="*", default=[],
                      help="Blacklist given cars from the test (e.g. HONDA)")
  parser.add_argument("--ignore-fields", type=str, nargs="*", default=[],
                      help="Extra fields or msgs to ignore (e.g. carState.events)")
  parser.add_argument("--ignore-msgs", type=str, nargs="*", default=[],
                      help="Msgs to ignore (e.g. carEvents)")
  parser.add_argument("--update-refs", action="store_true",
                      help="Updates reference logs using current commit")
  parser.add_argument("--upload-only", action="store_true",
                      help="Skips testing processes and uploads logs from previous test run")
  parser.add_argument("-j", "--jobs", type=int, default=max(cpu_count - 2, 1),
                      help="Max amount of parallel jobs")
  args = parser.parse_args()

  # blacklists win over whitelists; car names are matched case-insensitively
  tested_procs = set(args.whitelist_procs) - set(args.blacklist_procs)
  tested_cars = set(args.whitelist_cars) - set(args.blacklist_cars)
  tested_cars = {c.upper() for c in tested_cars}

  # reference logs may only be replaced when nothing was filtered out
  full_test = (tested_procs == all_procs) and (tested_cars == all_cars) and all(len(x) == 0 for x in (args.ignore_fields, args.ignore_msgs))
  upload = args.update_refs or args.upload_only
  os.makedirs(os.path.dirname(FAKEDATA), exist_ok=True)

  if upload:
    assert full_test, "Need to run full test when updating refs"

  try:
    with open(REF_COMMIT_FN) as f:
      ref_commit = f.read().strip()
  except FileNotFoundError:
    print("Couldn't find reference commit")
    sys.exit(1)

  cur_commit = get_commit()
  if not cur_commit:
    raise Exception("Couldn't get current commit")

  print(f"***** testing against commit {ref_commit} *****")

  # check to make sure all car brands are tested
  if full_test:
    untested = (set(interface_names) - set(excluded_interfaces)) - {c.lower() for c in tested_cars}
    assert len(untested) == 0, f"Cars missing routes: {str(untested)}"

  # segment -> proc -> {'ref': path, 'new': path}
  log_paths: defaultdict[str, dict[str, dict[str, str]]] = defaultdict(lambda: defaultdict(dict))
  with concurrent.futures.ProcessPoolExecutor(max_workers=args.jobs) as pool:
    if not args.upload_only:
      # download all segment logs up front, in parallel
      download_segments = [seg for car, seg in segments if car in tested_cars]
      log_data: dict[str, LogReader] = {}
      p1 = pool.map(get_log_data, download_segments)
      for segment, lr in tqdm(p1, desc="Getting Logs", total=len(download_segments)):
        log_data[segment] = lr

    pool_args: Any = []
    for car_brand, segment in segments:
      if car_brand not in tested_cars:
        continue

      for cfg in CONFIGS:
        if cfg.proc_name not in tested_procs:
          continue

        cur_log_fn = os.path.join(FAKEDATA, f"{segment}_{cfg.proc_name}_{cur_commit}.bz2")
        if args.update_refs:  # reference logs will not exist if routes were just regenerated
          ref_log_path = get_url(*segment.rsplit("--", 1))
        else:
          # prefer a locally cached ref log; fall back to the CI bucket
          ref_log_fn = os.path.join(FAKEDATA, f"{segment}_{cfg.proc_name}_{ref_commit}.bz2")
          ref_log_path = ref_log_fn if os.path.exists(ref_log_fn) else BASE_URL + os.path.basename(ref_log_fn)

        dat = None if args.upload_only else log_data[segment]
        pool_args.append((segment, cfg, args, cur_log_fn, ref_log_path, dat))

        log_paths[segment][cfg.proc_name]['ref'] = ref_log_path
        log_paths[segment][cfg.proc_name]['new'] = cur_log_fn

    results: Any = defaultdict(dict)
    p2 = pool.map(run_test_process, pool_args)
    for (segment, proc, result) in tqdm(p2, desc="Running Tests", total=len(pool_args)):
      if not args.upload_only:
        results[segment][proc] = result

  diff_short, diff_long, failed = format_diff(results, log_paths, ref_commit)
  if not upload:
    with open(os.path.join(PROC_REPLAY_DIR, "diff.txt"), "w") as f:
      f.write(diff_long)
    print(diff_short)

    if failed:
      print("TEST FAILED")
      print("\n\nTo push the new reference logs for this commit run:")
      print("./test_processes.py --upload-only")
    else:
      print("TEST SUCCEEDED")

  else:
    # refs were uploaded by the workers; pin the new reference commit
    with open(REF_COMMIT_FN, "w") as f:
      f.write(cur_commit)
    print(f"\n\nUpdated reference logs for commit: {cur_commit}")

  sys.exit(int(failed))
|
||||
45
selfdrive/test/process_replay/test_regen.py
Normal file
45
selfdrive/test/process_replay/test_regen.py
Normal file
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import unittest
|
||||
|
||||
from parameterized import parameterized
|
||||
|
||||
from openpilot.selfdrive.test.process_replay.regen import regen_segment, DummyFrameReader
|
||||
from openpilot.selfdrive.test.process_replay.process_replay import check_openpilot_enabled
|
||||
from openpilot.tools.lib.openpilotci import get_url
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
from openpilot.tools.lib.framereader import FrameReader
|
||||
|
||||
TESTED_SEGMENTS = [
|
||||
("PRIUS_C2", "0982d79ebb0de295|2021-01-04--17-13-21--13"), # TOYOTA PRIUS 2017: NEO, pandaStateDEPRECATED, no peripheralState, sensorEventsDEPRECATED
|
||||
# Enable these once regen on CI becomes faster or use them for different tests running controlsd in isolation
|
||||
# ("MAZDA_C3", "bd6a637565e91581|2021-10-30--15-14-53--4"), # MAZDA.CX9_2021: TICI, incomplete managerState
|
||||
# ("FORD_C3", "54827bf84c38b14f|2023-01-26--21-59-07--4"), # FORD.BRONCO_SPORT_MK1: TICI
|
||||
]
|
||||
|
||||
|
||||
def ci_setup_data_readers(route, sidx):
  """Build the log reader and frame readers for a CI-hosted segment.

  Returns (lr, frs) matching the shape regen_segment() expects.
  """
  log_reader = LogReader(get_url(route, sidx, "rlog"))

  readers = {}
  readers['roadCameraState'] = FrameReader(get_url(route, sidx, "fcamera"))
  # CI has no driver camera footage; a blank synthetic stream suffices for replay
  readers['driverCameraState'] = DummyFrameReader.zero_dcamera()
  # only fetch the wide camera when the log actually contains its state messages
  if any(m.which() == "wideRoadCameraState" for m in log_reader):
    readers["wideRoadCameraState"] = FrameReader(get_url(route, sidx, "ecamera"))

  return log_reader, readers
|
||||
|
||||
|
||||
class TestRegen(unittest.TestCase):
  """Regenerate each tested segment and check that openpilot engaged."""

  @parameterized.expand(TESTED_SEGMENTS)
  def test_engaged(self, case_name, segment):
    # segment ids look like "<route>--<segment index>"
    route, sidx = segment.rsplit("--", 1)
    log_reader, frame_readers = ci_setup_data_readers(route, sidx)
    output_logs = regen_segment(log_reader, frame_readers, disable_tqdm=True)

    engaged = check_openpilot_enabled(output_logs)
    self.assertTrue(engaged, f"openpilot not engaged in {case_name}")
|
||||
|
||||
|
||||
# Allow running this test file directly with the unittest runner.
if __name__=='__main__':
  unittest.main()
|
||||
43
selfdrive/test/process_replay/vision_meta.py
Normal file
43
selfdrive/test/process_replay/vision_meta.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from collections import namedtuple
|
||||
from cereal.visionipc import VisionStreamType
|
||||
from openpilot.common.realtime import DT_MDL, DT_DMON
|
||||
from openpilot.common.transformations.camera import tici_f_frame_size, tici_d_frame_size, tici_e_frame_size, eon_f_frame_size, eon_d_frame_size
|
||||
|
||||
# Ties together, per VisionIPC stream: its camera-state msg type, its encoder
# index msg type, the VisionIPC stream enum, the publish interval, and the
# per-device frame sizes.
VideoStreamMeta = namedtuple("VideoStreamMeta", ["camera_state", "encode_index", "stream", "dt", "frame_sizes"])
# frame sizes keyed by device type string ("tici"/"tizi"/"neo")
ROAD_CAMERA_FRAME_SIZES = {"tici": tici_f_frame_size, "tizi": tici_f_frame_size, "neo": eon_f_frame_size}
# no "neo" entry: that device has no wide road camera
WIDE_ROAD_CAMERA_FRAME_SIZES = {"tici": tici_e_frame_size, "tizi": tici_e_frame_size}
DRIVER_FRAME_SIZES = {"tici": tici_d_frame_size, "tizi": tici_d_frame_size, "neo": eon_d_frame_size}
VIPC_STREAM_METADATA = [
  # metadata: (state_msg_type, encode_msg_type, stream_type, dt, frame_sizes)
  ("roadCameraState", "roadEncodeIdx", VisionStreamType.VISION_STREAM_ROAD, DT_MDL, ROAD_CAMERA_FRAME_SIZES),
  ("wideRoadCameraState", "wideRoadEncodeIdx", VisionStreamType.VISION_STREAM_WIDE_ROAD, DT_MDL, WIDE_ROAD_CAMERA_FRAME_SIZES),
  ("driverCameraState", "driverEncodeIdx", VisionStreamType.VISION_STREAM_DRIVER, DT_DMON, DRIVER_FRAME_SIZES),
]
|
||||
|
||||
|
||||
def meta_from_camera_state(state):
  """Return the VideoStreamMeta whose camera_state matches, or None."""
  for entry in VIPC_STREAM_METADATA:
    if entry[0] == state:
      return VideoStreamMeta(*entry)
  return None
|
||||
|
||||
|
||||
def meta_from_encode_index(encode_index):
  """Return the VideoStreamMeta whose encode_index matches, or None."""
  for entry in VIPC_STREAM_METADATA:
    if entry[1] == encode_index:
      return VideoStreamMeta(*entry)
  return None
|
||||
|
||||
|
||||
def meta_from_stream_type(stream_type):
  """Return the VideoStreamMeta whose stream type matches, or None."""
  for entry in VIPC_STREAM_METADATA:
    if entry[2] == stream_type:
      return VideoStreamMeta(*entry)
  return None
|
||||
|
||||
|
||||
def available_streams(lr=None):
  """Return VideoStreamMeta for the VisionIPC streams available in a log.

  With lr=None, every known stream is returned. Otherwise lr is an iterable
  of log messages, and only streams whose camera-state message type appears
  in the log are included (in VIPC_STREAM_METADATA order).
  """
  if lr is None:
    return [VideoStreamMeta(*meta) for meta in VIPC_STREAM_METADATA]

  # Collect the message types in a single pass. The previous version scanned
  # lr once per stream, which is redundant work and would miss streams if lr
  # were a one-shot iterator (the first scan exhausts it).
  present = {m.which() for m in lr}
  return [VideoStreamMeta(*meta) for meta in VIPC_STREAM_METADATA if meta[0] in present]
|
||||
Reference in New Issue
Block a user