Add openpilot tests
This commit is contained in:
9
selfdrive/test/.gitignore
vendored
Normal file
9
selfdrive/test/.gitignore
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
out/
|
||||
docker_out/
|
||||
|
||||
process_replay/diff.txt
|
||||
process_replay/model_diff.txt
|
||||
valgrind_logs.txt
|
||||
|
||||
*.bz2
|
||||
*.hevc
|
||||
19
selfdrive/test/ci_shell.sh
Normal file
19
selfdrive/test/ci_shell.sh
Normal file
@@ -0,0 +1,19 @@
|
||||
#!/bin/bash -e
|
||||
|
||||
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)"
|
||||
OP_ROOT="$DIR/../../"
|
||||
|
||||
if [ -z "$BUILD" ]; then
|
||||
docker pull ghcr.io/commaai/openpilot-base:latest
|
||||
else
|
||||
docker build --cache-from ghcr.io/commaai/openpilot-base:latest -t ghcr.io/commaai/openpilot-base:latest -f $OP_ROOT/Dockerfile.openpilot_base .
|
||||
fi
|
||||
|
||||
docker run \
|
||||
-it \
|
||||
--rm \
|
||||
--volume $OP_ROOT:$OP_ROOT \
|
||||
--workdir $PWD \
|
||||
--env PYTHONPATH=$OP_ROOT \
|
||||
ghcr.io/commaai/openpilot-base:latest \
|
||||
/bin/bash
|
||||
58
selfdrive/test/ciui.py
Normal file
58
selfdrive/test/ciui.py
Normal file
@@ -0,0 +1,58 @@
|
||||
#!/usr/bin/env python3
|
||||
import signal
|
||||
import subprocess
|
||||
|
||||
signal.signal(signal.SIGINT, signal.SIG_DFL)
|
||||
signal.signal(signal.SIGTERM, signal.SIG_DFL)
|
||||
|
||||
from PyQt5.QtCore import QTimer
|
||||
from PyQt5.QtWidgets import QApplication, QWidget, QVBoxLayout, QLabel
|
||||
from openpilot.selfdrive.ui.qt.python_helpers import set_main_window
|
||||
|
||||
class Window(QWidget):
|
||||
def __init__(self, parent=None):
|
||||
super().__init__(parent)
|
||||
|
||||
layout = QVBoxLayout()
|
||||
self.setLayout(layout)
|
||||
|
||||
self.l = QLabel("jenkins runner")
|
||||
layout.addWidget(self.l)
|
||||
layout.addStretch(1)
|
||||
layout.setContentsMargins(20, 20, 20, 20)
|
||||
|
||||
cmds = [
|
||||
"cat /etc/hostname",
|
||||
"echo AGNOS v$(cat /VERSION)",
|
||||
"uptime -p",
|
||||
]
|
||||
self.labels = {}
|
||||
for c in cmds:
|
||||
self.labels[c] = QLabel(c)
|
||||
layout.addWidget(self.labels[c])
|
||||
|
||||
self.setStyleSheet("""
|
||||
* {
|
||||
color: white;
|
||||
font-size: 55px;
|
||||
background-color: black;
|
||||
font-family: "JetBrains Mono";
|
||||
}
|
||||
""")
|
||||
|
||||
self.timer = QTimer()
|
||||
self.timer.timeout.connect(self.update)
|
||||
self.timer.start(10 * 1000)
|
||||
self.update()
|
||||
|
||||
def update(self):
|
||||
for cmd, label in self.labels.items():
|
||||
out = subprocess.run(cmd, capture_output=True,
|
||||
shell=True, check=False, encoding='utf8').stdout
|
||||
label.setText(out.strip())
|
||||
|
||||
if __name__ == "__main__":
|
||||
app = QApplication([])
|
||||
w = Window()
|
||||
set_main_window(w)
|
||||
app.exec_()
|
||||
11
selfdrive/test/cpp_harness.py
Normal file
11
selfdrive/test/cpp_harness.py
Normal file
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env python3
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
from openpilot.common.prefix import OpenpilotPrefix
|
||||
|
||||
|
||||
with OpenpilotPrefix():
|
||||
ret = subprocess.call(sys.argv[1:])
|
||||
|
||||
exit(ret)
|
||||
26
selfdrive/test/docker_build.sh
Normal file
26
selfdrive/test/docker_build.sh
Normal file
@@ -0,0 +1,26 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
# To build sim and docs, you can run the following to mount the scons cache to the same place as in CI:
|
||||
# mkdir -p .ci_cache/scons_cache
|
||||
# sudo mount --bind /tmp/scons_cache/ .ci_cache/scons_cache
|
||||
|
||||
SCRIPT_DIR=$(dirname "$0")
|
||||
OPENPILOT_DIR=$SCRIPT_DIR/../../
|
||||
if [ -n "$TARGET_ARCHITECTURE" ]; then
|
||||
PLATFORM="linux/$TARGET_ARCHITECTURE"
|
||||
TAG_SUFFIX="-$TARGET_ARCHITECTURE"
|
||||
else
|
||||
PLATFORM="linux/$(uname -m)"
|
||||
TAG_SUFFIX=""
|
||||
fi
|
||||
|
||||
source $SCRIPT_DIR/docker_common.sh $1 "$TAG_SUFFIX"
|
||||
|
||||
DOCKER_BUILDKIT=1 docker buildx build --provenance false --pull --platform $PLATFORM --load --cache-to type=inline --cache-from type=registry,ref=$REMOTE_TAG -t $REMOTE_TAG -t $LOCAL_TAG -f $OPENPILOT_DIR/$DOCKER_FILE $OPENPILOT_DIR
|
||||
|
||||
if [ -n "$PUSH_IMAGE" ]; then
|
||||
docker push $REMOTE_TAG
|
||||
docker tag $REMOTE_TAG $REMOTE_SHA_TAG
|
||||
docker push $REMOTE_SHA_TAG
|
||||
fi
|
||||
21
selfdrive/test/docker_common.sh
Normal file
21
selfdrive/test/docker_common.sh
Normal file
@@ -0,0 +1,21 @@
|
||||
if [ "$1" = "base" ]; then
|
||||
export DOCKER_IMAGE=openpilot-base
|
||||
export DOCKER_FILE=Dockerfile.openpilot_base
|
||||
elif [ "$1" = "sim" ]; then
|
||||
export DOCKER_IMAGE=openpilot-sim
|
||||
export DOCKER_FILE=tools/sim/Dockerfile.sim
|
||||
elif [ "$1" = "prebuilt" ]; then
|
||||
export DOCKER_IMAGE=openpilot-prebuilt
|
||||
export DOCKER_FILE=Dockerfile.openpilot
|
||||
else
|
||||
echo "Invalid docker build image: '$1'"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
export DOCKER_REGISTRY=ghcr.io/commaai
|
||||
export COMMIT_SHA=$(git rev-parse HEAD)
|
||||
|
||||
TAG_SUFFIX=$2
|
||||
LOCAL_TAG=$DOCKER_IMAGE$TAG_SUFFIX
|
||||
REMOTE_TAG=$DOCKER_REGISTRY/$LOCAL_TAG
|
||||
REMOTE_SHA_TAG=$DOCKER_REGISTRY/$LOCAL_TAG:$COMMIT_SHA
|
||||
25
selfdrive/test/docker_tag_multiarch.sh
Normal file
25
selfdrive/test/docker_tag_multiarch.sh
Normal file
@@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
if [ $# -lt 2 ]; then
|
||||
echo "Usage: $0 <base|docs|sim|prebuilt|cl> <arch1> <arch2> ..."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
SCRIPT_DIR=$(dirname "$0")
|
||||
ARCHS=("${@:2}")
|
||||
|
||||
source $SCRIPT_DIR/docker_common.sh $1
|
||||
|
||||
MANIFEST_AMENDS=""
|
||||
for ARCH in ${ARCHS[@]}; do
|
||||
MANIFEST_AMENDS="$MANIFEST_AMENDS --amend $REMOTE_TAG-$ARCH:$COMMIT_SHA"
|
||||
done
|
||||
|
||||
docker manifest create $REMOTE_TAG $MANIFEST_AMENDS
|
||||
docker manifest create $REMOTE_SHA_TAG $MANIFEST_AMENDS
|
||||
|
||||
if [[ -n "$PUSH_IMAGE" ]]; then
|
||||
docker manifest push $REMOTE_TAG
|
||||
docker manifest push $REMOTE_SHA_TAG
|
||||
fi
|
||||
@@ -1,6 +1,7 @@
|
||||
import capnp
|
||||
import hypothesis.strategies as st
|
||||
from typing import Any, Callable, Dict, List, Optional, Union
|
||||
from typing import Any
|
||||
from collections.abc import Callable
|
||||
|
||||
from cereal import log
|
||||
|
||||
@@ -12,7 +13,7 @@ class FuzzyGenerator:
|
||||
self.draw = draw
|
||||
self.real_floats = real_floats
|
||||
|
||||
def generate_native_type(self, field: str) -> st.SearchStrategy[Union[bool, int, float, str, bytes]]:
|
||||
def generate_native_type(self, field: str) -> st.SearchStrategy[bool | int | float | str | bytes]:
|
||||
def floats(**kwargs) -> st.SearchStrategy[float]:
|
||||
allow_nan = not self.real_floats
|
||||
allow_infinity = not self.real_floats
|
||||
@@ -67,18 +68,18 @@ class FuzzyGenerator:
|
||||
else:
|
||||
return self.generate_struct(field.schema)
|
||||
|
||||
def generate_struct(self, schema: capnp.lib.capnp._StructSchema, event: Optional[str] = None) -> st.SearchStrategy[Dict[str, Any]]:
|
||||
full_fill: List[str] = list(schema.non_union_fields)
|
||||
single_fill: List[str] = [event] if event else [self.draw(st.sampled_from(schema.union_fields))] if schema.union_fields else []
|
||||
def generate_struct(self, schema: capnp.lib.capnp._StructSchema, event: str = None) -> st.SearchStrategy[dict[str, Any]]:
|
||||
full_fill: list[str] = list(schema.non_union_fields)
|
||||
single_fill: list[str] = [event] if event else [self.draw(st.sampled_from(schema.union_fields))] if schema.union_fields else []
|
||||
return st.fixed_dictionaries({field: self.generate_field(schema.fields[field]) for field in full_fill + single_fill})
|
||||
|
||||
@classmethod
|
||||
def get_random_msg(cls, draw: DrawType, struct: capnp.lib.capnp._StructModule, real_floats: bool = False) -> Dict[str, Any]:
|
||||
def get_random_msg(cls, draw: DrawType, struct: capnp.lib.capnp._StructModule, real_floats: bool = False) -> dict[str, Any]:
|
||||
fg = cls(draw, real_floats=real_floats)
|
||||
data: Dict[str, Any] = draw(fg.generate_struct(struct.schema))
|
||||
data: dict[str, Any] = draw(fg.generate_struct(struct.schema))
|
||||
return data
|
||||
|
||||
@classmethod
|
||||
def get_random_event_msg(cls, draw: DrawType, events: List[str], real_floats: bool = False) -> List[Dict[str, Any]]:
|
||||
def get_random_event_msg(cls, draw: DrawType, events: list[str], real_floats: bool = False) -> list[dict[str, Any]]:
|
||||
fg = cls(draw, real_floats=real_floats)
|
||||
return [draw(fg.generate_struct(log.Event.schema, e)) for e in sorted(events)]
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
import http.server
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
|
||||
from functools import wraps
|
||||
@@ -72,7 +74,29 @@ def noop(*args, **kwargs):
|
||||
|
||||
|
||||
def read_segment_list(segment_list_path):
|
||||
with open(segment_list_path, "r") as f:
|
||||
with open(segment_list_path) as f:
|
||||
seg_list = f.read().splitlines()
|
||||
|
||||
return [(platform[2:], segment) for platform, segment in zip(seg_list[::2], seg_list[1::2], strict=True)]
|
||||
|
||||
|
||||
def with_http_server(func, handler=http.server.BaseHTTPRequestHandler, setup=None):
|
||||
@wraps(func)
|
||||
def inner(*args, **kwargs):
|
||||
host = '127.0.0.1'
|
||||
server = http.server.HTTPServer((host, 0), handler)
|
||||
port = server.server_port
|
||||
t = threading.Thread(target=server.serve_forever)
|
||||
t.start()
|
||||
|
||||
if setup is not None:
|
||||
setup(host, port)
|
||||
|
||||
try:
|
||||
return func(*args, f'http://{host}:{port}', **kwargs)
|
||||
finally:
|
||||
server.shutdown()
|
||||
server.server_close()
|
||||
t.join()
|
||||
|
||||
return inner
|
||||
|
||||
1
selfdrive/test/longitudinal_maneuvers/.gitignore
vendored
Normal file
1
selfdrive/test/longitudinal_maneuvers/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
out/*
|
||||
0
selfdrive/test/longitudinal_maneuvers/__init__.py
Normal file
0
selfdrive/test/longitudinal_maneuvers/__init__.py
Normal file
71
selfdrive/test/longitudinal_maneuvers/maneuver.py
Normal file
71
selfdrive/test/longitudinal_maneuvers/maneuver.py
Normal file
@@ -0,0 +1,71 @@
|
||||
import numpy as np
|
||||
from openpilot.selfdrive.test.longitudinal_maneuvers.plant import Plant
|
||||
|
||||
|
||||
class Maneuver:
|
||||
def __init__(self, title, duration, **kwargs):
|
||||
# Was tempted to make a builder class
|
||||
self.distance_lead = kwargs.get("initial_distance_lead", 200.0)
|
||||
self.speed = kwargs.get("initial_speed", 0.0)
|
||||
self.lead_relevancy = kwargs.get("lead_relevancy", 0)
|
||||
|
||||
self.breakpoints = kwargs.get("breakpoints", [0.0, duration])
|
||||
self.speed_lead_values = kwargs.get("speed_lead_values", [0.0 for i in range(len(self.breakpoints))])
|
||||
self.prob_lead_values = kwargs.get("prob_lead_values", [1.0 for i in range(len(self.breakpoints))])
|
||||
self.cruise_values = kwargs.get("cruise_values", [50.0 for i in range(len(self.breakpoints))])
|
||||
|
||||
self.only_lead2 = kwargs.get("only_lead2", False)
|
||||
self.only_radar = kwargs.get("only_radar", False)
|
||||
self.ensure_start = kwargs.get("ensure_start", False)
|
||||
self.enabled = kwargs.get("enabled", True)
|
||||
self.e2e = kwargs.get("e2e", False)
|
||||
self.force_decel = kwargs.get("force_decel", False)
|
||||
|
||||
self.duration = duration
|
||||
self.title = title
|
||||
|
||||
def evaluate(self):
|
||||
plant = Plant(
|
||||
lead_relevancy=self.lead_relevancy,
|
||||
speed=self.speed,
|
||||
distance_lead=self.distance_lead,
|
||||
enabled=self.enabled,
|
||||
only_lead2=self.only_lead2,
|
||||
only_radar=self.only_radar,
|
||||
e2e=self.e2e,
|
||||
force_decel=self.force_decel,
|
||||
)
|
||||
|
||||
valid = True
|
||||
logs = []
|
||||
while plant.current_time < self.duration:
|
||||
speed_lead = np.interp(plant.current_time, self.breakpoints, self.speed_lead_values)
|
||||
prob = np.interp(plant.current_time, self.breakpoints, self.prob_lead_values)
|
||||
cruise = np.interp(plant.current_time, self.breakpoints, self.cruise_values)
|
||||
log = plant.step(speed_lead, prob, cruise)
|
||||
|
||||
d_rel = log['distance_lead'] - log['distance'] if self.lead_relevancy else 200.
|
||||
v_rel = speed_lead - log['speed'] if self.lead_relevancy else 0.
|
||||
log['d_rel'] = d_rel
|
||||
log['v_rel'] = v_rel
|
||||
logs.append(np.array([plant.current_time,
|
||||
log['distance'],
|
||||
log['distance_lead'],
|
||||
log['speed'],
|
||||
speed_lead,
|
||||
log['acceleration']]))
|
||||
|
||||
if d_rel < .4 and (self.only_radar or prob > 0.5):
|
||||
print("Crashed!!!!")
|
||||
valid = False
|
||||
|
||||
if self.ensure_start and log['v_rel'] > 0 and log['speeds'][-1] <= 0.1:
|
||||
print('LongitudinalPlanner not starting!')
|
||||
valid = False
|
||||
if self.force_decel and log['speed'] > 1e-1 and log['acceleration'] > -0.04:
|
||||
print('Not stopping with force decel')
|
||||
valid = False
|
||||
|
||||
|
||||
print("maneuver end", valid)
|
||||
return valid, np.array(logs)
|
||||
172
selfdrive/test/longitudinal_maneuvers/plant.py
Normal file
172
selfdrive/test/longitudinal_maneuvers/plant.py
Normal file
@@ -0,0 +1,172 @@
|
||||
#!/usr/bin/env python3
|
||||
import time
|
||||
import numpy as np
|
||||
|
||||
from cereal import log
|
||||
import cereal.messaging as messaging
|
||||
from openpilot.common.realtime import Ratekeeper, DT_MDL
|
||||
from openpilot.selfdrive.controls.lib.longcontrol import LongCtrlState
|
||||
from openpilot.selfdrive.modeld.constants import ModelConstants
|
||||
from openpilot.selfdrive.controls.lib.longitudinal_planner import LongitudinalPlanner
|
||||
from openpilot.selfdrive.controls.radard import _LEAD_ACCEL_TAU
|
||||
|
||||
|
||||
class Plant:
|
||||
messaging_initialized = False
|
||||
|
||||
def __init__(self, lead_relevancy=False, speed=0.0, distance_lead=2.0,
|
||||
enabled=True, only_lead2=False, only_radar=False, e2e=False, force_decel=False):
|
||||
self.rate = 1. / DT_MDL
|
||||
|
||||
if not Plant.messaging_initialized:
|
||||
Plant.radar = messaging.pub_sock('radarState')
|
||||
Plant.controls_state = messaging.pub_sock('controlsState')
|
||||
Plant.car_state = messaging.pub_sock('carState')
|
||||
Plant.plan = messaging.sub_sock('longitudinalPlan')
|
||||
Plant.messaging_initialized = True
|
||||
|
||||
self.v_lead_prev = 0.0
|
||||
|
||||
self.distance = 0.
|
||||
self.speed = speed
|
||||
self.acceleration = 0.0
|
||||
self.speeds = []
|
||||
|
||||
# lead car
|
||||
self.lead_relevancy = lead_relevancy
|
||||
self.distance_lead = distance_lead
|
||||
self.enabled = enabled
|
||||
self.only_lead2 = only_lead2
|
||||
self.only_radar = only_radar
|
||||
self.e2e = e2e
|
||||
self.force_decel = force_decel
|
||||
|
||||
self.rk = Ratekeeper(self.rate, print_delay_threshold=100.0)
|
||||
self.ts = 1. / self.rate
|
||||
time.sleep(0.1)
|
||||
self.sm = messaging.SubMaster(['longitudinalPlan'])
|
||||
|
||||
from openpilot.selfdrive.car.honda.values import CAR
|
||||
from openpilot.selfdrive.car.honda.interface import CarInterface
|
||||
|
||||
self.planner = LongitudinalPlanner(CarInterface.get_non_essential_params(CAR.CIVIC), init_v=self.speed)
|
||||
|
||||
@property
|
||||
def current_time(self):
|
||||
return float(self.rk.frame) / self.rate
|
||||
|
||||
def step(self, v_lead=0.0, prob=1.0, v_cruise=50.):
|
||||
# ******** publish a fake model going straight and fake calibration ********
|
||||
# note that this is worst case for MPC, since model will delay long mpc by one time step
|
||||
radar = messaging.new_message('radarState')
|
||||
control = messaging.new_message('controlsState')
|
||||
car_state = messaging.new_message('carState')
|
||||
model = messaging.new_message('modelV2')
|
||||
a_lead = (v_lead - self.v_lead_prev)/self.ts
|
||||
self.v_lead_prev = v_lead
|
||||
|
||||
if self.lead_relevancy:
|
||||
d_rel = np.maximum(0., self.distance_lead - self.distance)
|
||||
v_rel = v_lead - self.speed
|
||||
if self.only_radar:
|
||||
status = True
|
||||
elif prob > .5:
|
||||
status = True
|
||||
else:
|
||||
status = False
|
||||
else:
|
||||
d_rel = 200.
|
||||
v_rel = 0.
|
||||
prob = 0.0
|
||||
status = False
|
||||
|
||||
lead = log.RadarState.LeadData.new_message()
|
||||
lead.dRel = float(d_rel)
|
||||
lead.yRel = float(0.0)
|
||||
lead.vRel = float(v_rel)
|
||||
lead.aRel = float(a_lead - self.acceleration)
|
||||
lead.vLead = float(v_lead)
|
||||
lead.vLeadK = float(v_lead)
|
||||
lead.aLeadK = float(a_lead)
|
||||
# TODO use real radard logic for this
|
||||
lead.aLeadTau = float(_LEAD_ACCEL_TAU)
|
||||
lead.status = status
|
||||
lead.modelProb = float(prob)
|
||||
if not self.only_lead2:
|
||||
radar.radarState.leadOne = lead
|
||||
radar.radarState.leadTwo = lead
|
||||
|
||||
# Simulate model predicting slightly faster speed
|
||||
# this is to ensure lead policy is effective when model
|
||||
# does not predict slowdown in e2e mode
|
||||
position = log.XYZTData.new_message()
|
||||
position.x = [float(x) for x in (self.speed + 0.5) * np.array(ModelConstants.T_IDXS)]
|
||||
model.modelV2.position = position
|
||||
velocity = log.XYZTData.new_message()
|
||||
velocity.x = [float(x) for x in (self.speed + 0.5) * np.ones_like(ModelConstants.T_IDXS)]
|
||||
model.modelV2.velocity = velocity
|
||||
acceleration = log.XYZTData.new_message()
|
||||
acceleration.x = [float(x) for x in np.zeros_like(ModelConstants.T_IDXS)]
|
||||
model.modelV2.acceleration = acceleration
|
||||
|
||||
control.controlsState.longControlState = LongCtrlState.pid if self.enabled else LongCtrlState.off
|
||||
control.controlsState.vCruise = float(v_cruise * 3.6)
|
||||
control.controlsState.experimentalMode = self.e2e
|
||||
control.controlsState.forceDecel = self.force_decel
|
||||
car_state.carState.vEgo = float(self.speed)
|
||||
car_state.carState.standstill = self.speed < 0.01
|
||||
|
||||
# ******** get controlsState messages for plotting ***
|
||||
sm = {'radarState': radar.radarState,
|
||||
'carState': car_state.carState,
|
||||
'controlsState': control.controlsState,
|
||||
'modelV2': model.modelV2}
|
||||
self.planner.update(sm)
|
||||
self.speed = self.planner.v_desired_filter.x
|
||||
self.acceleration = self.planner.a_desired
|
||||
self.speeds = self.planner.v_desired_trajectory.tolist()
|
||||
fcw = self.planner.fcw
|
||||
self.distance_lead = self.distance_lead + v_lead * self.ts
|
||||
|
||||
# ******** run the car ********
|
||||
#print(self.distance, speed)
|
||||
if self.speed <= 0:
|
||||
self.speed = 0
|
||||
self.acceleration = 0
|
||||
self.distance = self.distance + self.speed * self.ts
|
||||
|
||||
# *** radar model ***
|
||||
if self.lead_relevancy:
|
||||
d_rel = np.maximum(0., self.distance_lead - self.distance)
|
||||
v_rel = v_lead - self.speed
|
||||
else:
|
||||
d_rel = 200.
|
||||
v_rel = 0.
|
||||
|
||||
# print at 5hz
|
||||
# if (self.rk.frame % (self.rate // 5)) == 0:
|
||||
# print("%2.2f sec %6.2f m %6.2f m/s %6.2f m/s2 lead_rel: %6.2f m %6.2f m/s"
|
||||
# % (self.current_time, self.distance, self.speed, self.acceleration, d_rel, v_rel))
|
||||
|
||||
|
||||
# ******** update prevs ********
|
||||
self.rk.monitor_time()
|
||||
|
||||
return {
|
||||
"distance": self.distance,
|
||||
"speed": self.speed,
|
||||
"acceleration": self.acceleration,
|
||||
"speeds": self.speeds,
|
||||
"distance_lead": self.distance_lead,
|
||||
"fcw": fcw,
|
||||
}
|
||||
|
||||
# simple engage in standalone mode
|
||||
def plant_thread():
|
||||
plant = Plant()
|
||||
while 1:
|
||||
plant.step()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
plant_thread()
|
||||
160
selfdrive/test/longitudinal_maneuvers/test_longitudinal.py
Normal file
160
selfdrive/test/longitudinal_maneuvers/test_longitudinal.py
Normal file
@@ -0,0 +1,160 @@
|
||||
#!/usr/bin/env python3
|
||||
import itertools
|
||||
import unittest
|
||||
from parameterized import parameterized_class
|
||||
|
||||
from openpilot.selfdrive.controls.lib.longitudinal_mpc_lib.long_mpc import STOP_DISTANCE
|
||||
from openpilot.selfdrive.test.longitudinal_maneuvers.maneuver import Maneuver
|
||||
|
||||
|
||||
# TODO: make new FCW tests
|
||||
def create_maneuvers(kwargs):
|
||||
maneuvers = [
|
||||
Maneuver(
|
||||
'approach stopped car at 25m/s, initial distance: 120m',
|
||||
duration=20.,
|
||||
initial_speed=25.,
|
||||
lead_relevancy=True,
|
||||
initial_distance_lead=120.,
|
||||
speed_lead_values=[30., 0.],
|
||||
breakpoints=[0., 1.],
|
||||
**kwargs,
|
||||
),
|
||||
Maneuver(
|
||||
'approach stopped car at 20m/s, initial distance 90m',
|
||||
duration=20.,
|
||||
initial_speed=20.,
|
||||
lead_relevancy=True,
|
||||
initial_distance_lead=90.,
|
||||
speed_lead_values=[20., 0.],
|
||||
breakpoints=[0., 1.],
|
||||
**kwargs,
|
||||
),
|
||||
Maneuver(
|
||||
'steady state following a car at 20m/s, then lead decel to 0mph at 1m/s^2',
|
||||
duration=50.,
|
||||
initial_speed=20.,
|
||||
lead_relevancy=True,
|
||||
initial_distance_lead=35.,
|
||||
speed_lead_values=[20., 20., 0.],
|
||||
breakpoints=[0., 15., 35.0],
|
||||
**kwargs,
|
||||
),
|
||||
Maneuver(
|
||||
'steady state following a car at 20m/s, then lead decel to 0mph at 2m/s^2',
|
||||
duration=50.,
|
||||
initial_speed=20.,
|
||||
lead_relevancy=True,
|
||||
initial_distance_lead=35.,
|
||||
speed_lead_values=[20., 20., 0.],
|
||||
breakpoints=[0., 15., 25.0],
|
||||
**kwargs,
|
||||
),
|
||||
Maneuver(
|
||||
'steady state following a car at 20m/s, then lead decel to 0mph at 3m/s^2',
|
||||
duration=50.,
|
||||
initial_speed=20.,
|
||||
lead_relevancy=True,
|
||||
initial_distance_lead=35.,
|
||||
speed_lead_values=[20., 20., 0.],
|
||||
breakpoints=[0., 15., 21.66],
|
||||
**kwargs,
|
||||
),
|
||||
Maneuver(
|
||||
'steady state following a car at 20m/s, then lead decel to 0mph at 3+m/s^2',
|
||||
duration=40.,
|
||||
initial_speed=20.,
|
||||
lead_relevancy=True,
|
||||
initial_distance_lead=35.,
|
||||
speed_lead_values=[20., 20., 0.],
|
||||
prob_lead_values=[0., 1., 1.],
|
||||
cruise_values=[20., 20., 20.],
|
||||
breakpoints=[2., 2.01, 8.8],
|
||||
**kwargs,
|
||||
),
|
||||
Maneuver(
|
||||
"approach stopped car at 20m/s, with prob_lead_values",
|
||||
duration=30.,
|
||||
initial_speed=20.,
|
||||
lead_relevancy=True,
|
||||
initial_distance_lead=120.,
|
||||
speed_lead_values=[0.0, 0., 0.],
|
||||
prob_lead_values=[0.0, 0., 1.],
|
||||
cruise_values=[20., 20., 20.],
|
||||
breakpoints=[0.0, 2., 2.01],
|
||||
**kwargs,
|
||||
),
|
||||
Maneuver(
|
||||
"approach slower cut-in car at 20m/s",
|
||||
duration=20.,
|
||||
initial_speed=20.,
|
||||
lead_relevancy=True,
|
||||
initial_distance_lead=50.,
|
||||
speed_lead_values=[15., 15.],
|
||||
breakpoints=[1., 11.],
|
||||
only_lead2=True,
|
||||
**kwargs,
|
||||
),
|
||||
Maneuver(
|
||||
"stay stopped behind radar override lead",
|
||||
duration=20.,
|
||||
initial_speed=0.,
|
||||
lead_relevancy=True,
|
||||
initial_distance_lead=10.,
|
||||
speed_lead_values=[0., 0.],
|
||||
prob_lead_values=[0., 0.],
|
||||
breakpoints=[1., 11.],
|
||||
only_radar=True,
|
||||
**kwargs,
|
||||
),
|
||||
Maneuver(
|
||||
"NaN recovery",
|
||||
duration=30.,
|
||||
initial_speed=15.,
|
||||
lead_relevancy=True,
|
||||
initial_distance_lead=60.,
|
||||
speed_lead_values=[0., 0., 0.0],
|
||||
breakpoints=[1., 1.01, 11.],
|
||||
cruise_values=[float("nan"), 15., 15.],
|
||||
**kwargs,
|
||||
),
|
||||
Maneuver(
|
||||
'cruising at 25 m/s while disabled',
|
||||
duration=20.,
|
||||
initial_speed=25.,
|
||||
lead_relevancy=False,
|
||||
enabled=False,
|
||||
**kwargs,
|
||||
),
|
||||
]
|
||||
if not kwargs['force_decel']:
|
||||
# controls relies on planner commanding to move for stock-ACC resume spamming
|
||||
maneuvers.append(Maneuver(
|
||||
"resume from a stop",
|
||||
duration=20.,
|
||||
initial_speed=0.,
|
||||
lead_relevancy=True,
|
||||
initial_distance_lead=STOP_DISTANCE,
|
||||
speed_lead_values=[0., 0., 2.],
|
||||
breakpoints=[1., 10., 15.],
|
||||
ensure_start=True,
|
||||
**kwargs,
|
||||
))
|
||||
return maneuvers
|
||||
|
||||
|
||||
@parameterized_class(("e2e", "force_decel"), itertools.product([True, False], repeat=2))
|
||||
class LongitudinalControl(unittest.TestCase):
|
||||
e2e: bool
|
||||
force_decel: bool
|
||||
|
||||
def test_maneuver(self):
|
||||
for maneuver in create_maneuvers({"e2e": self.e2e, "force_decel": self.force_decel}):
|
||||
with self.subTest(title=maneuver.title, e2e=maneuver.e2e, force_decel=maneuver.force_decel):
|
||||
print(maneuver.title, f'in {"e2e" if maneuver.e2e else "acc"} mode')
|
||||
valid, _ = maneuver.evaluate()
|
||||
self.assertTrue(valid)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main(failfast=True)
|
||||
8
selfdrive/test/loop_until_fail.sh
Normal file
8
selfdrive/test/loop_until_fail.sh
Normal file
@@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
# Loop something forever until it fails, for verifying new tests
|
||||
|
||||
while true; do
|
||||
$@
|
||||
done
|
||||
2
selfdrive/test/process_replay/.gitignore
vendored
Normal file
2
selfdrive/test/process_replay/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
fakedata/
|
||||
debayer_diff.txt
|
||||
126
selfdrive/test/process_replay/README.md
Normal file
126
selfdrive/test/process_replay/README.md
Normal file
@@ -0,0 +1,126 @@
|
||||
# Process replay
|
||||
|
||||
Process replay is a regression test designed to identify any changes in the output of a process. This test replays a segment through individual processes and compares the output to a known good replay. Each make is represented in the test with a segment.
|
||||
|
||||
If the test fails, make sure that you didn't unintentionally change anything. If there are intentional changes, the reference logs will be updated.
|
||||
|
||||
Use `test_processes.py` to run the test locally.
|
||||
Use `FILEREADER_CACHE='1' test_processes.py` to cache log files.
|
||||
|
||||
Currently the following processes are tested:
|
||||
|
||||
* controlsd
|
||||
* radard
|
||||
* plannerd
|
||||
* calibrationd
|
||||
* dmonitoringd
|
||||
* locationd
|
||||
* paramsd
|
||||
* ubloxd
|
||||
* torqued
|
||||
|
||||
### Usage
|
||||
```
|
||||
Usage: test_processes.py [-h] [--whitelist-procs PROCS] [--whitelist-cars CARS] [--blacklist-procs PROCS]
|
||||
[--blacklist-cars CARS] [--ignore-fields FIELDS] [--ignore-msgs MSGS] [--update-refs] [--upload-only]
|
||||
Regression test to identify changes in a process's output
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--whitelist-procs PROCS Whitelist given processes from the test (e.g. controlsd)
|
||||
--whitelist-cars WHITELIST_CARS Whitelist given cars from the test (e.g. HONDA)
|
||||
--blacklist-procs BLACKLIST_PROCS Blacklist given processes from the test (e.g. controlsd)
|
||||
--blacklist-cars BLACKLIST_CARS Blacklist given cars from the test (e.g. HONDA)
|
||||
--ignore-fields IGNORE_FIELDS Extra fields or msgs to ignore (e.g. carState.events)
|
||||
--ignore-msgs IGNORE_MSGS Msgs to ignore (e.g. onroadEvents)
|
||||
--update-refs Updates reference logs using current commit
|
||||
--upload-only Skips testing processes and uploads logs from previous test run
|
||||
```
|
||||
|
||||
## Forks
|
||||
|
||||
openpilot forks can use this test with their own reference logs, by default `test_proccess.py` saves logs locally.
|
||||
|
||||
To generate new logs:
|
||||
|
||||
`./test_processes.py`
|
||||
|
||||
Then, check in the new logs using git-lfs. Make sure to also update the `ref_commit` file to the current commit.
|
||||
|
||||
## API
|
||||
|
||||
Process replay test suite exposes programmatic APIs for simultaneously running processes or groups of processes on provided logs.
|
||||
|
||||
```py
|
||||
def replay_process_with_name(name: Union[str, Iterable[str]], lr: LogIterable, *args, **kwargs) -> List[capnp._DynamicStructReader]:
|
||||
|
||||
def replay_process(
|
||||
cfg: Union[ProcessConfig, Iterable[ProcessConfig]], lr: LogIterable, frs: Optional[Dict[str, Any]] = None,
|
||||
fingerprint: Optional[str] = None, return_all_logs: bool = False, custom_params: Optional[Dict[str, Any]] = None, disable_progress: bool = False
|
||||
) -> List[capnp._DynamicStructReader]:
|
||||
```
|
||||
|
||||
Example usage:
|
||||
```py
|
||||
from openpilot.selfdrive.test.process_replay import replay_process_with_name
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
|
||||
lr = LogReader(...)
|
||||
|
||||
# provide a name of the process to replay
|
||||
output_logs = replay_process_with_name('locationd', lr)
|
||||
|
||||
# or list of names
|
||||
output_logs = replay_process_with_name(['ubloxd', 'locationd'], lr)
|
||||
```
|
||||
|
||||
Supported processes:
|
||||
* controlsd
|
||||
* radard
|
||||
* plannerd
|
||||
* calibrationd
|
||||
* dmonitoringd
|
||||
* locationd
|
||||
* paramsd
|
||||
* ubloxd
|
||||
* torqued
|
||||
* modeld
|
||||
* dmonitoringmodeld
|
||||
|
||||
Certain processes may require an initial state, which is usually supplied within `Params` and persisting from segment to segment (e.g CalibrationParams, LiveParameters). The `custom_params` is dictionary used to prepopulate `Params` with arbitrary values. The `get_custom_params_from_lr` helper is provided to fetch meaningful values from log files.
|
||||
|
||||
```py
|
||||
from openpilot.selfdrive.test.process_replay import get_custom_params_from_lr
|
||||
|
||||
previous_segment_lr = LogReader(...)
|
||||
current_segment_lr = LogReader(...)
|
||||
|
||||
custom_params = get_custom_params_from_lr(previous_segment_lr, 'last')
|
||||
|
||||
output_logs = replay_process_with_name('calibrationd', lr, custom_params=custom_params)
|
||||
```
|
||||
|
||||
Replaying processes that use VisionIPC (e.g. modeld, dmonitoringmodeld) require additional `frs` dictionary with camera states as keys and `FrameReader` objects as values.
|
||||
|
||||
```py
|
||||
from openpilot.tools.lib.framereader import FrameReader
|
||||
|
||||
frs = {
|
||||
'roadCameraState': FrameReader(...),
|
||||
'wideRoadCameraState': FrameReader(...),
|
||||
'driverCameraState': FrameReader(...),
|
||||
}
|
||||
|
||||
output_logs = replay_process_with_name(['modeld', 'dmonitoringmodeld'], lr, frs=frs)
|
||||
```
|
||||
|
||||
To capture stdout/stderr of the replayed process, `captured_output_store` can be provided.
|
||||
|
||||
```py
|
||||
output_store = dict()
|
||||
# pass dictionary by reference, it will be filled with standard outputs - even if process replay fails
|
||||
output_logs = replay_process_with_name(['radard', 'plannerd'], lr, captured_output_store=output_store)
|
||||
|
||||
# entries with captured output in format { 'out': '...', 'err': '...' } will be added to provided dictionary for each replayed process
|
||||
print(output_store['radard']['out']) # radard stdout
|
||||
print(output_store['radard']['err']) # radard stderr
|
||||
```
|
||||
2
selfdrive/test/process_replay/__init__.py
Normal file
2
selfdrive/test/process_replay/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
from openpilot.selfdrive.test.process_replay.process_replay import CONFIGS, get_process_config, get_custom_params_from_lr, \
|
||||
replay_process, replay_process_with_name # noqa: F401
|
||||
59
selfdrive/test/process_replay/capture.py
Normal file
59
selfdrive/test/process_replay/capture.py
Normal file
@@ -0,0 +1,59 @@
|
||||
import os
|
||||
import sys
|
||||
|
||||
from typing import no_type_check
|
||||
|
||||
class FdRedirect:
|
||||
def __init__(self, file_prefix: str, fd: int):
|
||||
fname = os.path.join("/tmp", f"{file_prefix}.{fd}")
|
||||
if os.path.exists(fname):
|
||||
os.unlink(fname)
|
||||
self.dest_fd = os.open(fname, os.O_WRONLY | os.O_CREAT)
|
||||
self.dest_fname = fname
|
||||
self.source_fd = fd
|
||||
os.set_inheritable(self.dest_fd, True)
|
||||
|
||||
def __del__(self):
|
||||
os.close(self.dest_fd)
|
||||
|
||||
def purge(self) -> None:
|
||||
os.unlink(self.dest_fname)
|
||||
|
||||
def read(self) -> bytes:
|
||||
with open(self.dest_fname, "rb") as f:
|
||||
return f.read() or b""
|
||||
|
||||
def link(self) -> None:
|
||||
os.dup2(self.dest_fd, self.source_fd)
|
||||
|
||||
|
||||
class ProcessOutputCapture:
|
||||
def __init__(self, proc_name: str, prefix: str):
|
||||
prefix = f"{proc_name}_{prefix}"
|
||||
self.stdout_redirect = FdRedirect(prefix, 1)
|
||||
self.stderr_redirect = FdRedirect(prefix, 2)
|
||||
|
||||
def __del__(self):
|
||||
self.stdout_redirect.purge()
|
||||
self.stderr_redirect.purge()
|
||||
|
||||
@no_type_check # ipython classes have incompatible signatures
|
||||
def link_with_current_proc(self) -> None:
|
||||
try:
|
||||
# prevent ipykernel from redirecting stdout/stderr of python subprocesses
|
||||
from ipykernel.iostream import OutStream
|
||||
if isinstance(sys.stdout, OutStream):
|
||||
sys.stdout = sys.__stdout__
|
||||
if isinstance(sys.stderr, OutStream):
|
||||
sys.stderr = sys.__stderr__
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# link stdout/stderr to the fifo
|
||||
self.stdout_redirect.link()
|
||||
self.stderr_redirect.link()
|
||||
|
||||
def read_outerr(self) -> tuple[str, str]:
|
||||
out_str = self.stdout_redirect.read().decode()
|
||||
err_str = self.stderr_redirect.read().decode()
|
||||
return out_str, err_str
|
||||
150
selfdrive/test/process_replay/compare_logs.py
Normal file
150
selfdrive/test/process_replay/compare_logs.py
Normal file
@@ -0,0 +1,150 @@
|
||||
#!/usr/bin/env python3
|
||||
import sys
|
||||
import math
|
||||
import capnp
|
||||
import numbers
|
||||
import dictdiffer
|
||||
from collections import Counter
|
||||
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
|
||||
EPSILON = sys.float_info.epsilon
|
||||
|
||||
|
||||
def remove_ignored_fields(msg, ignore):
  """Return a builder copy of capnp message *msg* with each dotted field path in *ignore* zeroed.

  Fields are reset to a type-neutral value (False/0/[]) rather than removed, so
  the two logs stay structurally comparable byte-for-byte.
  """
  msg = msg.as_builder()
  for key in ignore:
    attr = msg
    keys = key.split(".")
    # nested paths only apply when the message's union type matches the path root
    if msg.which() != keys[0] and len(keys) > 1:
      continue

    for k in keys[:-1]:
      # indexing into list
      if k.isdigit():
        attr = attr[int(k)]
      else:
        attr = getattr(attr, k)

    v = getattr(attr, keys[-1])
    if isinstance(v, bool):
      val = False
    elif isinstance(v, numbers.Number):
      val = 0
    elif isinstance(v, (list, capnp.lib.capnp._DynamicListBuilder)):
      val = []
    else:
      raise NotImplementedError(f"Unknown type: {type(v)}")
    setattr(attr, keys[-1], val)
  return msg
|
||||
|
||||
|
||||
def compare_logs(log1, log2, ignore_fields=None, ignore_msgs=None, tolerance=None):
  """Compare two lists of capnp log messages and return a list of dictdiffer entries.

  Messages whose type is in *ignore_msgs* are dropped first; dotted field paths
  in *ignore_fields* are zeroed before comparison. Numeric "change" entries
  within *tolerance* (used as both relative and absolute bound) are filtered out.

  Raises:
    Exception: if the filtered logs differ in length or message types don't align.
  """
  if ignore_fields is None:
    ignore_fields = []
  if ignore_msgs is None:
    ignore_msgs = []
  tolerance = EPSILON if tolerance is None else tolerance

  log1, log2 = (
    [m for m in log if m.which() not in ignore_msgs]
    for log in (log1, log2)
  )

  if len(log1) != len(log2):
    cnt1 = Counter(m.which() for m in log1)
    cnt2 = Counter(m.which() for m in log2)
    raise Exception(f"logs are not same length: {len(log1)} VS {len(log2)}\n\t\t{cnt1}\n\t\t{cnt2}")

  diff = []
  for msg1, msg2 in zip(log1, log2, strict=True):
    if msg1.which() != msg2.which():
      raise Exception("msgs not aligned between logs")

    msg1 = remove_ignored_fields(msg1, ignore_fields)
    msg2 = remove_ignored_fields(msg2, ignore_fields)

    # only do the (expensive) dict diff when the serialized bytes differ
    if msg1.to_bytes() != msg2.to_bytes():
      msg1_dict = msg1.as_reader().to_dict(verbose=True)
      msg2_dict = msg2.as_reader().to_dict(verbose=True)

      dd = dictdiffer.diff(msg1_dict, msg2_dict, ignore=ignore_fields)

      # Dictdiffer only supports relative tolerance, we also want to check for absolute
      # TODO: add this to dictdiffer
      def outside_tolerance(entry):
        # returns True when the diff entry should be KEPT (i.e. not within tolerance)
        try:
          if entry[0] == "change":
            a, b = entry[2]
            # check types before isfinite, instead of relying on math.isfinite
            # raising TypeError for non-numeric values
            if isinstance(a, numbers.Number) and isinstance(b, numbers.Number) \
               and math.isfinite(a) and math.isfinite(b):
              return abs(a - b) > max(tolerance, tolerance * max(abs(a), abs(b)))
        except TypeError:
          # e.g. complex values that isfinite can't handle: keep the entry
          pass
        return True

      dd = list(filter(outside_tolerance, dd))

      diff.extend(dd)
  return diff
|
||||
|
||||
|
||||
def format_process_diff(diff):
  """Render one process's diff as a (short summary, long listing) string pair.

  *diff* is either an error string (replay failure) or a list of dictdiffer
  entries; the short form counts occurrences per changed field path.
  """
  short_parts = []
  long_parts = []

  if isinstance(diff, str):
    # replay-level error message, not a field diff
    short_parts.append(f"    {diff}\n")
    long_parts.append(f"\t{diff}\n")
  else:
    long_parts.extend(f"\t{str(entry)}\n" for entry in diff)
    # tally how many entries touched each field path
    counts = Counter(str(entry[1]) for entry in diff)
    short_parts.extend(f"    {path}: {n}\n" for path, n in sorted(counts.items()))

  return "".join(short_parts), "".join(long_parts)
|
||||
|
||||
|
||||
def format_diff(results, log_paths, ref_commit):
  """Format per-segment, per-process diff results into (short, long, failed).

  *results* maps segment -> process -> diff (list or error string); *log_paths*
  maps segment -> process -> {'ref': ..., 'new': ...}. *failed* is True when
  any process produced a non-empty diff or an error string.
  """
  short_parts = []
  long_parts = [f"***** tested against commit {ref_commit} *****\n"]

  failed = False
  for segment, result in results.items():
    short_parts.append(f"***** results for segment {segment} *****\n")
    long_parts.append(f"***** differences for segment {segment} *****\n")

    for proc, diff in result.items():
      ref_path = log_paths[segment][proc]['ref']
      new_path = log_paths[segment][proc]['new']

      long_parts.append(f"*** process: {proc} ***\n")
      long_parts.append(f"\tref: {ref_path}\n")
      long_parts.append(f"\tnew: {new_path}\n\n")

      short_parts.append(f"  {proc}\n")

      # an error string or any diff entries means this process failed
      if isinstance(diff, str) or len(diff):
        short_parts.append(f"    ref: {ref_path}\n")
        short_parts.append(f"    new: {new_path}\n\n")
        failed = True

      proc_diff_short, proc_diff_long = format_process_diff(diff)

      long_parts.append(proc_diff_long)
      short_parts.append(proc_diff_short)

  return "".join(short_parts), "".join(long_parts), failed
|
||||
|
||||
|
||||
if __name__ == "__main__":
  # CLI usage: compare_logs.py <ref_log> <new_log> [ignore_field ...]
  log1 = list(LogReader(sys.argv[1]))
  log2 = list(LogReader(sys.argv[2]))
  # default ignores are timestamps/counters that can never match across runs
  ignore_fields = sys.argv[3:] or ["logMonoTime", "controlsState.startMonoTime", "controlsState.cumLagMs"]
  # wrap in the segment/proc structure that format_diff expects
  results = {"segment": {"proc": compare_logs(log1, log2, ignore_fields)}}
  log_paths = {"segment": {"proc": {"ref": sys.argv[1], "new": sys.argv[2]}}}
  diff_short, diff_long, failed = format_diff(results, log_paths, None)

  print(diff_long)
  print(diff_short)
|
||||
1
selfdrive/test/process_replay/debayer_replay_ref_commit
Normal file
1
selfdrive/test/process_replay/debayer_replay_ref_commit
Normal file
@@ -0,0 +1 @@
|
||||
8f9ba7540b4549b4a57312129b8ff678d045f70f
|
||||
203
selfdrive/test/process_replay/migration.py
Normal file
203
selfdrive/test/process_replay/migration.py
Normal file
@@ -0,0 +1,203 @@
|
||||
from collections import defaultdict
|
||||
|
||||
from cereal import messaging
|
||||
from openpilot.selfdrive.test.process_replay.vision_meta import meta_from_encode_index
|
||||
from openpilot.selfdrive.car.toyota.values import EPS_SCALE
|
||||
from openpilot.selfdrive.manager.process_config import managed_processes
|
||||
from panda import Panda
|
||||
|
||||
|
||||
def migrate_all(lr, old_logtime=False, manager_states=False, panda_states=False, camera_states=False):
  """Run the log-migration pipeline; sensor and carParams migrations always run,
  the remaining ones are gated by their flags."""
  msgs = migrate_carParams(migrate_sensorEvents(lr, old_logtime), old_logtime)
  if manager_states:
    msgs = migrate_managerState(msgs)
  if panda_states:
    # peripheralState synthesis keys off the (migrated) pandaStates messages
    msgs = migrate_peripheralState(migrate_pandaStates(msgs))
  if camera_states:
    msgs = migrate_cameraStates(msgs)
  return msgs
|
||||
|
||||
|
||||
def migrate_managerState(lr):
  """Rewrite each managerState message to report every managed process as running."""
  # same synthetic process list for every message; capnp copies it on assignment
  fake_procs = [{'name': name, 'running': True} for name in managed_processes]

  out = []
  for msg in lr:
    if msg.which() == "managerState":
      builder = msg.as_builder()
      builder.managerState.processes = fake_procs
      out.append(builder.as_reader())
    else:
      out.append(msg)

  return out
|
||||
|
||||
|
||||
def migrate_pandaStates(lr):
  """Migrate pandaStateDEPRECATED messages to the pandaStates list schema and backfill safetyParam.

  The safety parameter was not always logged (or lived in a deprecated field),
  so one value is reconstructed from the log's carParams and stamped onto every
  panda state message.
  """
  all_msgs = []
  # TODO: safety param migration should be handled automatically
  # fingerprint-specific overrides for platforms whose params changed meaning over time
  safety_param_migration = {
    "TOYOTA PRIUS 2017": EPS_SCALE["TOYOTA PRIUS 2017"] | Panda.FLAG_TOYOTA_STOCK_LONGITUDINAL,
    "TOYOTA RAV4 2017": EPS_SCALE["TOYOTA RAV4 2017"] | Panda.FLAG_TOYOTA_ALT_BRAKE | Panda.FLAG_TOYOTA_GAS_INTERCEPTOR,
    "KIA EV6 2022": Panda.FLAG_HYUNDAI_EV_GAS | Panda.FLAG_HYUNDAI_CANFD_HDA2,
  }

  # Migrate safety param base on carState
  CP = next((m.carParams for m in lr if m.which() == 'carParams'), None)
  assert CP is not None, "carParams message not found"
  if CP.carFingerprint in safety_param_migration:
    safety_param = safety_param_migration[CP.carFingerprint]
  elif len(CP.safetyConfigs):
    safety_param = CP.safetyConfigs[0].safetyParam
    # older logs stored the value in the deprecated field; prefer it when set
    if CP.safetyConfigs[0].safetyParamDEPRECATED != 0:
      safety_param = CP.safetyConfigs[0].safetyParamDEPRECATED
  else:
    safety_param = CP.safetyParamDEPRECATED

  for msg in lr:
    if msg.which() == 'pandaStateDEPRECATED':
      # wrap the single old state into the new single-element list message
      new_msg = messaging.new_message('pandaStates', 1)
      new_msg.valid = msg.valid
      new_msg.logMonoTime = msg.logMonoTime
      new_msg.pandaStates[0] = msg.pandaStateDEPRECATED
      new_msg.pandaStates[0].safetyParam = safety_param
      all_msgs.append(new_msg.as_reader())
    elif msg.which() == 'pandaStates':
      new_msg = msg.as_builder()
      # NOTE(review): only the last panda's safetyParam is overwritten —
      # presumably that's the one driving the car; confirm against consumers
      new_msg.pandaStates[-1].safetyParam = safety_param
      all_msgs.append(new_msg.as_reader())
    else:
      all_msgs.append(msg)

  return all_msgs
|
||||
|
||||
|
||||
def migrate_peripheralState(lr):
  """Synthesize a peripheralState message after every panda state message,
  but only when the log contains none at all."""
  if any(m.which() == "peripheralState" for m in lr):
    return lr

  out = []
  for msg in lr:
    out.append(msg)
    if msg.which() in ("pandaStates", "pandaStateDEPRECATED"):
      # empty peripheralState stamped with the panda message's time/validity
      ps = messaging.new_message("peripheralState")
      ps.valid = msg.valid
      ps.logMonoTime = msg.logMonoTime
      out.append(ps.as_reader())

  return out
|
||||
|
||||
|
||||
def migrate_cameraStates(lr):
  """Rewrite camera state messages so frameId/encodeId match the encoder's segmentId.

  Modern consumers address frames by encode index, so each camera state's
  frameId is remapped using the corresponding *EncodeIdx messages. Logs without
  any encode indices fall back to offsetting frameIds from the lowest seen.
  """
  all_msgs = []
  # camera service name -> {frameId: encoder segmentId}
  frame_to_encode_id = defaultdict(dict)
  # just for encodeId fallback mechanism
  min_frame_id = defaultdict(lambda: float('inf'))

  # first pass: collect the frameId -> segmentId mapping per camera feed
  for msg in lr:
    if msg.which() not in ["roadEncodeIdx", "wideRoadEncodeIdx", "driverEncodeIdx"]:
      continue

    encode_index = getattr(msg, msg.which())
    meta = meta_from_encode_index(msg.which())

    # sanity bound; presumably ~20Hz * 60s segments — confirm expected max
    assert encode_index.segmentId < 1200, f"Encoder index segmentId greater that 1200: {msg.which()} {encode_index.segmentId}"
    frame_to_encode_id[meta.camera_state][encode_index.frameId] = encode_index.segmentId

  # second pass: rebuild camera state messages with remapped ids
  for msg in lr:
    if msg.which() not in ["roadCameraState", "wideRoadCameraState", "driverCameraState"]:
      all_msgs.append(msg)
      continue

    camera_state = getattr(msg, msg.which())
    min_frame_id[msg.which()] = min(min_frame_id[msg.which()], camera_state.frameId)

    encode_id = frame_to_encode_id[msg.which()].get(camera_state.frameId)
    if encode_id is None:
      print(f"Missing encoded frame for camera feed {msg.which()} with frameId: {camera_state.frameId}")
      # this feed has encode indices, just not for this frame: drop the frame
      if len(frame_to_encode_id[msg.which()]) != 0:
        continue

      # fallback mechanism for logs without encodeIdx (e.g. logs from before 2022 with dcamera recording disabled)
      # try to fake encode_id by subtracting lowest frameId
      encode_id = camera_state.frameId - min_frame_id[msg.which()]
      print(f"Faking encodeId to {encode_id} for camera feed {msg.which()} with frameId: {camera_state.frameId}")

    new_msg = messaging.new_message(msg.which())
    new_camera_state = getattr(new_msg, new_msg.which())
    # both ids are set to the encoder id so frame lookup works either way
    new_camera_state.frameId = encode_id
    new_camera_state.encodeId = encode_id
    # timestampSof was added later so it might be missing on some old segments
    if camera_state.timestampSof == 0 and camera_state.timestampEof > 25000000:
      # NOTE(review): 18ms looks like an assumed exposure/readout time — confirm
      new_camera_state.timestampSof = camera_state.timestampEof - 18000000
    else:
      new_camera_state.timestampSof = camera_state.timestampSof
    new_camera_state.timestampEof = camera_state.timestampEof
    new_msg.logMonoTime = msg.logMonoTime
    new_msg.valid = msg.valid

    all_msgs.append(new_msg.as_reader())

  return all_msgs
|
||||
|
||||
|
||||
def migrate_carParams(lr, old_logtime=False):
  """Rebuild carParams messages, backfilling the per-firmware brand field from carName."""
  out = []
  for msg in lr:
    if msg.which() != 'carParams':
      out.append(msg)
      continue

    new_cp = messaging.new_message('carParams')
    new_cp.valid = True
    new_cp.carParams = msg.carParams.as_builder()
    # brand was added after some logs were recorded; derive it from carName
    for fw in new_cp.carParams.carFw:
      fw.brand = new_cp.carParams.carName
    if old_logtime:
      # preserve the original timestamp when replaying old logs
      new_cp.logMonoTime = msg.logMonoTime
    out.append(new_cp.as_reader())

  return out
|
||||
|
||||
|
||||
def migrate_sensorEvents(lr, old_logtime=False):
  """Split deprecated combined sensorEvents messages into one message per sensor service.

  Each event inside a sensorEventsDEPRECATED message becomes a standalone
  message of the matching service (accelerometer, gyroscope, ...). When
  *old_logtime* is set, the original log/sensor timestamps are preserved.

  Raises:
    ValueError: on a sensor event union member with no known target service.
  """
  # old sensorEvent union member -> standalone service name
  sensor_service_map = {
    'acceleration': 'accelerometer',
    'gyro': 'gyroscope',
    'gyroUncalibrated': 'gyroscope',
    'light': 'lightSensor',
    'proximity': 'lightSensor',
    'magnetic': 'magnetometer',
    'magneticUncalibrated': 'magnetometer',
    'temperature': 'temperatureSensor',
  }

  all_msgs = []
  for msg in lr:
    if msg.which() != 'sensorEventsDEPRECATED':
      all_msgs.append(msg)
      continue

    # migrate to split sensor events
    for evt in msg.sensorEventsDEPRECATED:
      kind = evt.which()
      sensor_service = sensor_service_map.get(kind)
      if sensor_service is None:
        # previously an unknown type fell through with an empty service name and
        # crashed inside messaging.new_message(''); fail loudly instead
        raise ValueError(f"unknown sensor event type: {kind}")

      # build new message for this sensor type
      m = messaging.new_message(sensor_service)
      m.valid = True
      if old_logtime:
        m.logMonoTime = msg.logMonoTime

      m_dat = getattr(m, sensor_service)
      m_dat.version = evt.version
      m_dat.sensor = evt.sensor
      m_dat.type = evt.type
      m_dat.source = evt.source
      if old_logtime:
        m_dat.timestamp = evt.timestamp
      setattr(m_dat, kind, getattr(evt, kind))

      all_msgs.append(m.as_reader())

  return all_msgs
|
||||
249
selfdrive/test/process_replay/model_replay.py
Normal file
249
selfdrive/test/process_replay/model_replay.py
Normal file
@@ -0,0 +1,249 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from typing import Any
|
||||
|
||||
import cereal.messaging as messaging
|
||||
from openpilot.common.params import Params
|
||||
from openpilot.system.hardware import PC
|
||||
from openpilot.selfdrive.manager.process_config import managed_processes
|
||||
from openpilot.tools.lib.openpilotci import BASE_URL, get_url
|
||||
from openpilot.selfdrive.test.process_replay.compare_logs import compare_logs, format_diff
|
||||
from openpilot.selfdrive.test.process_replay.process_replay import get_process_config, replay_process
|
||||
from openpilot.system.version import get_commit
|
||||
from openpilot.tools.lib.framereader import FrameReader
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
from openpilot.tools.lib.helpers import save_log
|
||||
|
||||
TEST_ROUTE = "2f4452b03ccb98f0|2022-12-03--13-45-30"
|
||||
SEGMENT = 6
|
||||
MAX_FRAMES = 100 if PC else 600
|
||||
NAV_FRAMES = 50
|
||||
|
||||
NO_NAV = "NO_NAV" in os.environ
|
||||
NO_MODEL = "NO_MODEL" in os.environ
|
||||
SEND_EXTRA_INPUTS = bool(int(os.getenv("SEND_EXTRA_INPUTS", "0")))
|
||||
|
||||
|
||||
def get_log_fn(ref_commit, test_route):
  """Filename of the reference model-replay log for a given commit and route."""
  return "".join((test_route, "_model_tici_", ref_commit, ".bz2"))
|
||||
|
||||
|
||||
def trim_logs_to_max_frames(logs, max_frames, frs_types, include_all_types):
  """Return logs in time order, cut off once every type in *frs_types* has
  exactly *max_frames* messages; then append every message whose type is in
  *include_all_types* (regardless of the cutoff)."""
  counts = dict.fromkeys(frs_types, 0)
  trimmed = []
  for msg in sorted(logs, key=lambda m: m.logMonoTime):
    trimmed.append(msg)
    which = msg.which()
    if which in counts:
      counts[which] += 1
    # stop as soon as every tracked camera feed reached the frame budget
    if all(c == max_frames for c in counts.values()):
      break

  if include_all_types:
    trimmed.extend(m for m in logs if m.which() in include_all_types)

  return trimmed
|
||||
|
||||
|
||||
def nav_model_replay(lr):
  """Replay the nav model pipeline (mapsd + navmodeld) against a recorded route.

  Feeds liveLocationKalman and navRoute messages from *lr* into live
  mapsd/navmodeld processes and returns the navThumbnail, mapRenderState and
  navModel outputs for the last NAV_FRAMES positions.
  """
  sm = messaging.SubMaster(['navModel', 'navThumbnail', 'mapRenderState'])
  pm = messaging.PubMaster(['liveLocationKalman', 'navRoute'])

  nav = [m for m in lr if m.which() == 'navRoute']
  llk = [m for m in lr if m.which() == 'liveLocationKalman']
  # need a route set before the replayed position window begins
  assert len(nav) > 0 and len(llk) >= NAV_FRAMES and nav[0].logMonoTime < llk[-NAV_FRAMES].logMonoTime

  log_msgs = []
  try:
    # mapsd needs a mapbox token; test mode makes rendering deterministic
    assert "MAPBOX_TOKEN" in os.environ
    os.environ['MAP_RENDER_TEST_MODE'] = '1'
    Params().put_bool('DmModelInitialized', True)
    managed_processes['mapsd'].start()
    managed_processes['navmodeld'].start()

    # setup position and route
    for _ in range(10):
      for s in (llk[-NAV_FRAMES], nav[0]):
        pm.send(s.which(), s.as_builder().to_bytes())
      sm.update(1000)
      if sm.updated['navModel']:
        break
      time.sleep(1)

    if not sm.updated['navModel']:
      raise Exception("no navmodeld outputs, failed to initialize")

    # drain
    time.sleep(2)
    sm.update(0)

    # run replay
    for n in range(len(llk) - NAV_FRAMES, len(llk)):
      pm.send(llk[n].which(), llk[n].as_builder().to_bytes())
      # each position must produce exactly one thumbnail, render state and model output
      m = messaging.recv_one(sm.sock['navThumbnail'])
      assert m is not None, f"no navThumbnail, frame={n}"
      log_msgs.append(m)

      m = messaging.recv_one(sm.sock['mapRenderState'])
      assert m is not None, f"no mapRenderState, frame={n}"
      log_msgs.append(m)

      m = messaging.recv_one(sm.sock['navModel'])
      assert m is not None, f"no navModel response, frame={n}"
      log_msgs.append(m)
  finally:
    # always stop the spawned daemons, even when the replay fails
    managed_processes['mapsd'].stop()
    managed_processes['navmodeld'].stop()

  return log_msgs
|
||||
|
||||
|
||||
def model_replay(lr, frs):
  """Replay modeld and dmonitoringmodeld over the log's camera frames.

  Returns the concatenated output messages of both processes. *frs* maps camera
  service names to frame readers for the video streams.
  """
  # modeld is using frame pairs
  modeld_logs = trim_logs_to_max_frames(lr, MAX_FRAMES, {"roadCameraState", "wideRoadCameraState"}, {"roadEncodeIdx", "wideRoadEncodeIdx", "carParams"})
  dmodeld_logs = trim_logs_to_max_frames(lr, MAX_FRAMES, {"driverCameraState"}, {"driverEncodeIdx", "carParams"})
  if not SEND_EXTRA_INPUTS:
    # strip live calibration updates and replace them with one initial message
    modeld_logs = [msg for msg in modeld_logs if msg.which() not in ["liveCalibration",]]
    dmodeld_logs = [msg for msg in dmodeld_logs if msg.which() not in ["liveCalibration",]]
    # initial calibration
    cal_msg = next(msg for msg in lr if msg.which() == "liveCalibration").as_builder()
    cal_msg.logMonoTime = lr[0].logMonoTime
    modeld_logs.insert(0, cal_msg.as_reader())
    dmodeld_logs.insert(0, cal_msg.as_reader())

  modeld = get_process_config("modeld")
  dmonitoringmodeld = get_process_config("dmonitoringmodeld")

  modeld_msgs = replay_process(modeld, modeld_logs, frs)
  dmonitoringmodeld_msgs = replay_process(dmonitoringmodeld, dmodeld_logs, frs)
  return modeld_msgs + dmonitoringmodeld_msgs
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
update = "--update" in sys.argv
|
||||
replay_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
ref_commit_fn = os.path.join(replay_dir, "model_replay_ref_commit")
|
||||
|
||||
# load logs
|
||||
lr = list(LogReader(get_url(TEST_ROUTE, SEGMENT)))
|
||||
frs = {
|
||||
'roadCameraState': FrameReader(get_url(TEST_ROUTE, SEGMENT, log_type="fcamera"), readahead=True),
|
||||
'driverCameraState': FrameReader(get_url(TEST_ROUTE, SEGMENT, log_type="dcamera"), readahead=True),
|
||||
'wideRoadCameraState': FrameReader(get_url(TEST_ROUTE, SEGMENT, log_type="ecamera"), readahead=True)
|
||||
}
|
||||
|
||||
# Update tile refs
|
||||
if update:
|
||||
import urllib
|
||||
import requests
|
||||
import threading
|
||||
import http.server
|
||||
from openpilot.tools.lib.openpilotci import upload_bytes
|
||||
os.environ['MAPS_HOST'] = 'http://localhost:5000'
|
||||
|
||||
class HTTPRequestHandler(http.server.BaseHTTPRequestHandler):
|
||||
def do_GET(self):
|
||||
assert len(self.path) > 10 # Sanity check on path length
|
||||
r = requests.get(f'https://api.mapbox.com{self.path}', timeout=30)
|
||||
upload_bytes(r.content, urllib.parse.urlparse(self.path).path.lstrip('/'))
|
||||
self.send_response(r.status_code)
|
||||
self.send_header('Content-type','text/html')
|
||||
self.end_headers()
|
||||
self.wfile.write(r.content)
|
||||
|
||||
server = http.server.HTTPServer(("127.0.0.1", 5000), HTTPRequestHandler)
|
||||
thread = threading.Thread(None, server.serve_forever, daemon=True)
|
||||
thread.start()
|
||||
else:
|
||||
os.environ['MAPS_HOST'] = BASE_URL.rstrip('/')
|
||||
|
||||
log_msgs = []
|
||||
# run replays
|
||||
if not NO_MODEL:
|
||||
log_msgs += model_replay(lr, frs)
|
||||
if not NO_NAV:
|
||||
log_msgs += nav_model_replay(lr)
|
||||
|
||||
# get diff
|
||||
failed = False
|
||||
if not update:
|
||||
with open(ref_commit_fn) as f:
|
||||
ref_commit = f.read().strip()
|
||||
log_fn = get_log_fn(ref_commit, TEST_ROUTE)
|
||||
try:
|
||||
all_logs = list(LogReader(BASE_URL + log_fn))
|
||||
cmp_log = []
|
||||
|
||||
# logs are ordered based on type: modelV2, driverStateV2, nav messages (navThumbnail, mapRenderState, navModel)
|
||||
if not NO_MODEL:
|
||||
model_start_index = next(i for i, m in enumerate(all_logs) if m.which() in ("modelV2", "cameraOdometry"))
|
||||
cmp_log += all_logs[model_start_index:model_start_index + MAX_FRAMES*2]
|
||||
dmon_start_index = next(i for i, m in enumerate(all_logs) if m.which() == "driverStateV2")
|
||||
cmp_log += all_logs[dmon_start_index:dmon_start_index + MAX_FRAMES]
|
||||
if not NO_NAV:
|
||||
nav_start_index = next(i for i, m in enumerate(all_logs) if m.which() in ["navThumbnail", "mapRenderState", "navModel"])
|
||||
nav_logs = all_logs[nav_start_index:nav_start_index + NAV_FRAMES*3]
|
||||
cmp_log += nav_logs
|
||||
|
||||
ignore = [
|
||||
'logMonoTime',
|
||||
'modelV2.frameDropPerc',
|
||||
'modelV2.modelExecutionTime',
|
||||
'driverStateV2.modelExecutionTime',
|
||||
'driverStateV2.dspExecutionTime',
|
||||
'navModel.dspExecutionTime',
|
||||
'navModel.modelExecutionTime',
|
||||
'navThumbnail.timestampEof',
|
||||
'mapRenderState.locationMonoTime',
|
||||
'mapRenderState.renderTime',
|
||||
]
|
||||
if PC:
|
||||
ignore += [
|
||||
'modelV2.laneLines.0.t',
|
||||
'modelV2.laneLines.1.t',
|
||||
'modelV2.laneLines.2.t',
|
||||
'modelV2.laneLines.3.t',
|
||||
'modelV2.roadEdges.0.t',
|
||||
'modelV2.roadEdges.1.t',
|
||||
]
|
||||
# TODO this tolerance is absurdly large
|
||||
tolerance = 2.0 if PC else None
|
||||
results: Any = {TEST_ROUTE: {}}
|
||||
log_paths: Any = {TEST_ROUTE: {"models": {'ref': BASE_URL + log_fn, 'new': log_fn}}}
|
||||
results[TEST_ROUTE]["models"] = compare_logs(cmp_log, log_msgs, tolerance=tolerance, ignore_fields=ignore)
|
||||
diff_short, diff_long, failed = format_diff(results, log_paths, ref_commit)
|
||||
|
||||
print(diff_long)
|
||||
print('-------------\n'*5)
|
||||
print(diff_short)
|
||||
with open("model_diff.txt", "w") as f:
|
||||
f.write(diff_long)
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
failed = True
|
||||
|
||||
# upload new refs
|
||||
if (update or failed) and not PC:
|
||||
from openpilot.tools.lib.openpilotci import upload_file
|
||||
|
||||
print("Uploading new refs")
|
||||
|
||||
new_commit = get_commit()
|
||||
log_fn = get_log_fn(new_commit, TEST_ROUTE)
|
||||
save_log(log_fn, log_msgs)
|
||||
try:
|
||||
upload_file(log_fn, os.path.basename(log_fn))
|
||||
except Exception as e:
|
||||
print("failed to upload", e)
|
||||
|
||||
with open(ref_commit_fn, 'w') as f:
|
||||
f.write(str(new_commit))
|
||||
|
||||
print("\n\nNew ref commit: ", new_commit)
|
||||
|
||||
sys.exit(int(failed))
|
||||
1
selfdrive/test/process_replay/model_replay_ref_commit
Normal file
1
selfdrive/test/process_replay/model_replay_ref_commit
Normal file
@@ -0,0 +1 @@
|
||||
e8b359a82316e6dfce3b6fb0fb9684431bfa0a1b
|
||||
800
selfdrive/test/process_replay/process_replay.py
Normal file
800
selfdrive/test/process_replay/process_replay.py
Normal file
@@ -0,0 +1,800 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import time
|
||||
import copy
|
||||
import json
|
||||
import heapq
|
||||
import signal
|
||||
import platform
|
||||
from collections import OrderedDict
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any
|
||||
from collections.abc import Callable, Iterable
|
||||
from tqdm import tqdm
|
||||
import capnp
|
||||
|
||||
import cereal.messaging as messaging
|
||||
from cereal import car
|
||||
from cereal.services import SERVICE_LIST
|
||||
from cereal.visionipc import VisionIpcServer, get_endpoint_name as vipc_get_endpoint_name
|
||||
from openpilot.common.params import Params
|
||||
from openpilot.common.prefix import OpenpilotPrefix
|
||||
from openpilot.common.timeout import Timeout
|
||||
from openpilot.common.realtime import DT_CTRL
|
||||
from panda.python import ALTERNATIVE_EXPERIENCE
|
||||
from openpilot.selfdrive.car.car_helpers import get_car, interfaces
|
||||
from openpilot.selfdrive.manager.process_config import managed_processes
|
||||
from openpilot.selfdrive.test.process_replay.vision_meta import meta_from_camera_state, available_streams
|
||||
from openpilot.selfdrive.test.process_replay.migration import migrate_all
|
||||
from openpilot.selfdrive.test.process_replay.capture import ProcessOutputCapture
|
||||
from openpilot.tools.lib.logreader import LogIterable
|
||||
from openpilot.tools.lib.framereader import BaseFrameReader
|
||||
|
||||
# Numpy gives different results based on CPU features after version 19
|
||||
NUMPY_TOLERANCE = 1e-7
|
||||
PROC_REPLAY_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
FAKEDATA = os.path.join(PROC_REPLAY_DIR, "fakedata/")
|
||||
|
||||
class DummySocket:
|
||||
def __init__(self):
|
||||
self.data: list[bytes] = []
|
||||
|
||||
def receive(self, non_blocking: bool = False) -> bytes | None:
|
||||
if non_blocking:
|
||||
return None
|
||||
|
||||
return self.data.pop()
|
||||
|
||||
def send(self, data: bytes):
|
||||
self.data.append(data)
|
||||
|
||||
class LauncherWithCapture:
  """Wrap a process launcher so stdout/stderr redirection is hooked in before the target runs."""

  def __init__(self, capture: ProcessOutputCapture, launcher: Callable):
    self.capture = capture
    self.launcher = launcher

  def __call__(self, *args, **kwargs):
    # runs in the child process: attach the redirection, then hand off
    capture, launcher = self.capture, self.launcher
    capture.link_with_current_proc()
    launcher(*args, **kwargs)
|
||||
|
||||
|
||||
class ReplayContext:
  """Synchronization context for lock-step replay of a single process.

  Uses fake cereal events to block the replayed process inside its recv()
  calls until the replay driver has published the matching input message.
  """

  def __init__(self, cfg):
    self.proc_name = cfg.proc_name
    self.pubs = cfg.pubs
    self.main_pub = cfg.main_pub
    self.main_pub_drained = cfg.main_pub_drained
    self.unlocked_pubs = cfg.unlocked_pubs
    # need at least one socket to synchronize on
    assert(len(self.pubs) != 0 or self.main_pub is not None)

  def __enter__(self):
    self.open_context()

    return self

  def __exit__(self, exc_type, exc_obj, exc_tb):
    self.close_context()

  def open_context(self):
    # enable fake, per-process-prefixed messaging so replays can run in parallel
    messaging.toggle_fake_events(True)
    messaging.set_fake_prefix(self.proc_name)

    if self.main_pub is None:
      # one fake event handle per pub, except those explicitly left unlocked
      self.events = OrderedDict()
      pubs_with_events = [pub for pub in self.pubs if pub not in self.unlocked_pubs]
      for pub in pubs_with_events:
        self.events[pub] = messaging.fake_event_handle(pub, enable=True)
    else:
      # single driving socket mode: only the main pub is synchronized
      self.events = {self.main_pub: messaging.fake_event_handle(self.main_pub, enable=True)}

  def close_context(self):
    del self.events

    messaging.toggle_fake_events(False)
    messaging.delete_fake_prefix()

  @property
  def all_recv_called_events(self):
    # events the replayed process sets when it enters recv()
    return [man.recv_called_event for man in self.events.values()]

  @property
  def all_recv_ready_events(self):
    # events we set to release the replayed process's recv()
    return [man.recv_ready_event for man in self.events.values()]

  def send_sync(self, pm, endpoint, dat):
    # wait until the process is blocked in recv(), publish, then release it
    self.events[endpoint].recv_called_event.wait()
    self.events[endpoint].recv_called_event.clear()
    pm.send(endpoint, dat)
    self.events[endpoint].recv_ready_event.set()

  def unlock_sockets(self):
    # release each synchronized socket exactly once (e.g. past an initial recv)
    expected_sets = len(self.events)
    while expected_sets > 0:
      index = messaging.wait_for_one_event(self.all_recv_called_events)
      self.all_recv_called_events[index].clear()
      self.all_recv_ready_events[index].set()
      expected_sets -= 1

  def wait_for_recv_called(self):
    messaging.wait_for_one_event(self.all_recv_called_events)

  def wait_for_next_recv(self, trigger_empty_recv):
    index = messaging.wait_for_one_event(self.all_recv_called_events)
    if self.main_pub is not None and self.main_pub_drained and trigger_empty_recv:
      # for a drained main pub, allow one empty recv so the process fully
      # drains its queue before we consider the step complete
      self.all_recv_called_events[index].clear()
      self.all_recv_ready_events[index].set()
      self.all_recv_called_events[index].wait()
|
||||
|
||||
|
||||
@dataclass
class ProcessConfig:
  """Static description of how to replay one openpilot process."""
  proc_name: str
  pubs: list[str]                # services the replay publishes as inputs to the process
  subs: list[str]                # services captured as the process's outputs
  ignore: list[str]              # fields ignored when diffing the outputs
  config_callback: Callable | None = None       # hook to adjust Params/cfg before start
  init_callback: Callable | None = None         # hook run after the process starts
  should_recv_callback: Callable | None = None  # NOTE(review): presumably gates which input msgs expect output — confirm
  tolerance: float | None = None                # numeric diff tolerance for this process
  processing_time: float = 0.001
  timeout: int = 30
  simulation: bool = True                       # sets SIMULATION=1 in the process env
  main_pub: str | None = None                   # single driving socket, if any
  main_pub_drained: bool = True                 # main pub is read via a drained recv
  vision_pubs: list[str] = field(default_factory=list)       # camera streams served over VisionIPC
  ignore_alive_pubs: list[str] = field(default_factory=list)
  unlocked_pubs: list[str] = field(default_factory=list)     # pubs excluded from lock-step sync
|
||||
|
||||
|
||||
class ProcessContainer:
|
||||
  def __init__(self, cfg: ProcessConfig):
    """Prepare an isolated (prefixed) container for replaying one process; nothing starts yet."""
    # per-container params/msgq prefix, left on disk after exit for inspection
    self.prefix = OpenpilotPrefix(clean_dirs_on_exit=False)
    # deep copies so one container can't mutate shared config/process objects
    self.cfg = copy.deepcopy(cfg)
    self.process = copy.deepcopy(managed_processes[cfg.proc_name])
    self.msg_queue: list[capnp._DynamicStructReader] = []
    self.cnt = 0
    # populated by start()
    self.pm: messaging.PubMaster | None = None
    self.sockets: list[messaging.SubSocket] | None = None
    self.rc: ReplayContext | None = None
    self.vipc_server: VisionIpcServer | None = None
    self.environ_config: dict[str, Any] | None = None
    self.capture: ProcessOutputCapture | None = None
|
||||
|
||||
  @property
  def has_empty_queue(self) -> bool:
    # True when all queued input messages have been consumed
    return len(self.msg_queue) == 0
|
||||
|
||||
  @property
  def pubs(self) -> list[str]:
    # services this container publishes to the replayed process
    return self.cfg.pubs
|
||||
|
||||
  @property
  def subs(self) -> list[str]:
    # services captured from the replayed process
    return self.cfg.subs
|
||||
|
||||
def _clean_env(self):
|
||||
for k in self.environ_config.keys():
|
||||
if k in os.environ:
|
||||
del os.environ[k]
|
||||
|
||||
for k in ["PROC_NAME", "SIMULATION"]:
|
||||
if k in os.environ:
|
||||
del os.environ[k]
|
||||
|
||||
def _setup_env(self, params_config: dict[str, Any], environ_config: dict[str, Any]):
|
||||
for k, v in environ_config.items():
|
||||
if len(v) != 0:
|
||||
os.environ[k] = v
|
||||
elif k in os.environ:
|
||||
del os.environ[k]
|
||||
|
||||
os.environ["PROC_NAME"] = self.cfg.proc_name
|
||||
if self.cfg.simulation:
|
||||
os.environ["SIMULATION"] = "1"
|
||||
elif "SIMULATION" in os.environ:
|
||||
del os.environ["SIMULATION"]
|
||||
|
||||
params = Params()
|
||||
for k, v in params_config.items():
|
||||
if isinstance(v, bool):
|
||||
params.put_bool(k, v)
|
||||
else:
|
||||
params.put(k, v)
|
||||
|
||||
self.environ_config = environ_config
|
||||
|
||||
def _setup_vision_ipc(self, all_msgs: LogIterable, frs: dict[str, Any]):
|
||||
assert len(self.cfg.vision_pubs) != 0
|
||||
|
||||
vipc_server = VisionIpcServer("camerad")
|
||||
streams_metas = available_streams(all_msgs)
|
||||
for meta in streams_metas:
|
||||
if meta.camera_state in self.cfg.vision_pubs:
|
||||
frame_size = (frs[meta.camera_state].w, frs[meta.camera_state].h)
|
||||
vipc_server.create_buffers(meta.stream, 2, False, *frame_size)
|
||||
vipc_server.start_listener()
|
||||
|
||||
self.vipc_server = vipc_server
|
||||
self.cfg.vision_pubs = [meta.camera_state for meta in streams_metas if meta.camera_state in self.cfg.vision_pubs]
|
||||
|
||||
def _start_process(self):
|
||||
if self.capture is not None:
|
||||
self.process.launcher = LauncherWithCapture(self.capture, self.process.launcher)
|
||||
self.process.prepare()
|
||||
self.process.start()
|
||||
|
||||
def start(
|
||||
self, params_config: dict[str, Any], environ_config: dict[str, Any],
|
||||
all_msgs: LogIterable, frs: dict[str, BaseFrameReader] | None,
|
||||
fingerprint: str | None, capture_output: bool
|
||||
):
|
||||
with self.prefix as p:
|
||||
self._setup_env(params_config, environ_config)
|
||||
|
||||
if self.cfg.config_callback is not None:
|
||||
params = Params()
|
||||
self.cfg.config_callback(params, self.cfg, all_msgs)
|
||||
|
||||
self.rc = ReplayContext(self.cfg)
|
||||
self.rc.open_context()
|
||||
|
||||
self.pm = messaging.PubMaster(self.cfg.pubs)
|
||||
self.sockets = [messaging.sub_sock(s, timeout=100) for s in self.cfg.subs]
|
||||
|
||||
if len(self.cfg.vision_pubs) != 0:
|
||||
assert frs is not None
|
||||
self._setup_vision_ipc(all_msgs, frs)
|
||||
assert self.vipc_server is not None
|
||||
|
||||
if capture_output:
|
||||
self.capture = ProcessOutputCapture(self.cfg.proc_name, p.prefix)
|
||||
|
||||
self._start_process()
|
||||
|
||||
if self.cfg.init_callback is not None:
|
||||
self.cfg.init_callback(self.rc, self.pm, all_msgs, fingerprint)
|
||||
|
||||
# wait for process to startup
|
||||
with Timeout(10, error_msg=f"timed out waiting for process to start: {repr(self.cfg.proc_name)}"):
|
||||
while not all(self.pm.all_readers_updated(s) for s in self.cfg.pubs if s not in self.cfg.ignore_alive_pubs):
|
||||
time.sleep(0)
|
||||
|
||||
def stop(self):
|
||||
with self.prefix:
|
||||
self.process.signal(signal.SIGKILL)
|
||||
self.process.stop()
|
||||
self.rc.close_context()
|
||||
self.prefix.clean_dirs()
|
||||
self._clean_env()
|
||||
|
||||
def run_step(self, msg: capnp._DynamicStructReader, frs: dict[str, BaseFrameReader] | None) -> list[capnp._DynamicStructReader]:
|
||||
assert self.rc and self.pm and self.sockets and self.process.proc
|
||||
|
||||
output_msgs = []
|
||||
with self.prefix, Timeout(self.cfg.timeout, error_msg=f"timed out testing process {repr(self.cfg.proc_name)}"):
|
||||
end_of_cycle = True
|
||||
if self.cfg.should_recv_callback is not None:
|
||||
end_of_cycle = self.cfg.should_recv_callback(msg, self.cfg, self.cnt)
|
||||
|
||||
self.msg_queue.append(msg)
|
||||
if end_of_cycle:
|
||||
self.rc.wait_for_recv_called()
|
||||
|
||||
# call recv to let sub-sockets reconnect, after we know the process is ready
|
||||
if self.cnt == 0:
|
||||
for s in self.sockets:
|
||||
messaging.recv_one_or_none(s)
|
||||
|
||||
# empty recv on drained pub indicates the end of messages, only do that if there're any
|
||||
trigger_empty_recv = False
|
||||
if self.cfg.main_pub and self.cfg.main_pub_drained:
|
||||
trigger_empty_recv = next((True for m in self.msg_queue if m.which() == self.cfg.main_pub), False)
|
||||
|
||||
for m in self.msg_queue:
|
||||
self.pm.send(m.which(), m.as_builder())
|
||||
# send frames if needed
|
||||
if self.vipc_server is not None and m.which() in self.cfg.vision_pubs:
|
||||
camera_state = getattr(m, m.which())
|
||||
camera_meta = meta_from_camera_state(m.which())
|
||||
assert frs is not None
|
||||
img = frs[m.which()].get(camera_state.frameId, pix_fmt="nv12")[0]
|
||||
self.vipc_server.send(camera_meta.stream, img.flatten().tobytes(),
|
||||
camera_state.frameId, camera_state.timestampSof, camera_state.timestampEof)
|
||||
self.msg_queue = []
|
||||
|
||||
self.rc.unlock_sockets()
|
||||
self.rc.wait_for_next_recv(trigger_empty_recv)
|
||||
|
||||
for socket in self.sockets:
|
||||
ms = messaging.drain_sock(socket)
|
||||
for m in ms:
|
||||
m = m.as_builder()
|
||||
m.logMonoTime = msg.logMonoTime + int(self.cfg.processing_time * 1e9)
|
||||
output_msgs.append(m.as_reader())
|
||||
self.cnt += 1
|
||||
assert self.process.proc.is_alive()
|
||||
|
||||
return output_msgs
|
||||
|
||||
|
||||
def controlsd_fingerprint_callback(rc, pm, msgs, fingerprint):
  """Drive controlsd through CAN-based fingerprinting by feeding it logged can msgs.

  Sends an initial can/pandaStates handshake, then replays up to 300 can
  messages until controlsd writes CarParams. Raises ValueError if the can
  messages run out before fingerprinting completes.
  """
  print("start fingerprinting")
  params = Params()
  canmsgs = [msg for msg in msgs if msg.which() == "can"][:300]

  # controlsd expects one arbitrary can and pandaState
  rc.send_sync(pm, "can", messaging.new_message("can", 1))
  pm.send("pandaStates", messaging.new_message("pandaStates", 1))
  rc.send_sync(pm, "can", messaging.new_message("can", 1))
  rc.wait_for_next_recv(True)

  # fingerprinting is done, when CarParams is set
  while params.get("CarParams") is None:
    if len(canmsgs) == 0:
      raise ValueError("Fingerprinting failed. Run out of can msgs")

    m = canmsgs.pop(0)
    rc.send_sync(pm, "can", m.as_builder().to_bytes())
    rc.wait_for_next_recv(False)
|
||||
|
||||
def get_car_params_callback(rc, pm, msgs, fingerprint):
  """Compute CarParams for the replay and store them in Params.

  With an explicit fingerprint, builds non-essential params directly from the
  car interface. Otherwise fingerprints from logged can messages via get_car,
  which requires either SKIP_FW_QUERY or a cached CarParams.
  """
  params = Params()
  if fingerprint:
    CarInterface, _, _ = interfaces[fingerprint]
    CP = CarInterface.get_non_essential_params(fingerprint)
  else:
    can = DummySocket()
    sendcan = DummySocket()

    canmsgs = [msg for msg in msgs if msg.which() == "can"]
    has_cached_cp = params.get("CarParamsCache") is not None
    assert len(canmsgs) != 0, "CAN messages are required for fingerprinting"
    assert os.environ.get("SKIP_FW_QUERY", False) or has_cached_cp, \
      "CarParamsCache is required for fingerprinting. Make sure to keep carParams msgs in the logs."

    # preload the fake can socket so get_car can fingerprint offline
    for m in canmsgs[:300]:
      can.send(m.as_builder().to_bytes())
    _, CP = get_car(can, sendcan, Params().get_bool("ExperimentalLongitudinalEnabled"))
  params.put("CarParams", CP.to_bytes())
  return CP
|
||||
|
||||
def controlsd_rcv_callback(msg, cfg, frame):
  """should_recv callback for controlsd: fire when any subscriber output is due.

  Only can messages drive controlsd's loop; sendcan is suppressed for the
  first ~2000 frames while controlsd initializes.
  """
  which = msg.which()
  if which != "can":
    return False

  can_freq = SERVICE_LIST[which].frequency
  due = []
  for sub in cfg.subs:
    decimation = int(can_freq / SERVICE_LIST[sub].frequency)
    if frame % decimation == 0:
      due.append(sub)

  # no sendcan until controlsd is initialized
  if (frame - 1) < 2000 and "sendcan" in due:
    due.remove("sendcan")
  return len(due) > 0
|
||||
|
||||
def calibration_rcv_callback(msg, cfg, frame):
  # calibrationd publishes 1 calibrationData every 5 cameraOdometry packets.
  # should_recv always true to increment frame
  if frame == 1:
    return True
  return msg.which() == 'cameraOdometry'
|
||||
|
||||
def torqued_rcv_callback(msg, cfg, frame):
  # should_recv always true to increment frame
  return msg.which() == 'liveLocationKalman' or frame == 1
|
||||
|
||||
def dmonitoringmodeld_rcv_callback(msg, cfg, frame):
  """Trigger dmonitoringmodeld output on every driver camera frame."""
  is_dcam_frame = msg.which() == "driverCameraState"
  return is_dcam_frame
|
||||
|
||||
class ModeldCameraSyncRcvCallback:
  """should_recv callback for modeld that keeps camera streams in sync.

  With two vision pubs, fires only once both a road and a wide-road frame have
  been seen; with a single camera, fires on every road frame.
  """

  def __init__(self):
    self.road_present = False
    self.wide_road_present = False
    self.is_dual_camera = True

  def __call__(self, msg, cfg, frame):
    self.is_dual_camera = len(cfg.vision_pubs) == 2
    which = msg.which()
    if which == "roadCameraState":
      self.road_present = True
    elif which == "wideRoadCameraState":
      self.wide_road_present = True

    if self.road_present and self.wide_road_present:
      # full pair received: reset trackers and fire
      self.road_present = False
      self.wide_road_present = False
      return True
    if self.road_present and not self.is_dual_camera:
      # single-camera setup: road frame alone is enough
      self.road_present = False
      return True
    return False
|
||||
|
||||
class MessageBasedRcvCallback:
  """should_recv callback that fires on exactly one message type."""

  def __init__(self, trigger_msg_type):
    self.trigger_msg_type = trigger_msg_type

  def __call__(self, msg, cfg, frame):
    triggered = msg.which() == self.trigger_msg_type
    return triggered
|
||||
|
||||
class FrequencyBasedRcvCallback:
  """should_recv callback that fires when a subscriber output is due.

  On each trigger message, checks whether any subscriber's service frequency
  (relative to the trigger's frequency) makes this frame an output frame.
  """

  def __init__(self, trigger_msg_type):
    self.trigger_msg_type = trigger_msg_type

  def __call__(self, msg, cfg, frame):
    if msg.which() != self.trigger_msg_type:
      return False

    trigger_freq = SERVICE_LIST[msg.which()].frequency
    for s in cfg.subs:
      decimation = max(1, int(trigger_freq / SERVICE_LIST[s].frequency))
      if frame % decimation == 0:
        return True
    return False
|
||||
|
||||
def controlsd_config_callback(params, cfg, lr):
  """Seed controlsd with a controlsState captured after initialization.

  Scans the logs for the first controlsState observed after onroadEvents no
  longer report controlsInitializing, and stores it as ReplayControlsState.
  Asserts if the logs never reach an initialized controlsState.
  """
  controlsState = None
  initialized = False
  for msg in lr:
    if msg.which() == "controlsState":
      controlsState = msg.controlsState
      # keep the last controlsState seen before/at initialization, then stop
      if initialized:
        break
    elif msg.which() == "onroadEvents":
      initialized = car.CarEvent.EventName.controlsInitializing not in [e.name for e in msg.onroadEvents]

  assert controlsState is not None and initialized, "controlsState never initialized"
  params.put("ReplayControlsState", controlsState.as_builder().to_bytes())
|
||||
|
||||
def locationd_config_pubsub_callback(params, cfg, lr):
  """Drop the GPS pub that is unused for this route's receiver type."""
  if params.get_bool("UbloxAvailable"):
    unused = {"gpsLocation"}
  else:
    unused = {"gpsLocationExternal"}

  cfg.pubs = set(cfg.pubs) - unused
|
||||
|
||||
# Registry of replayable processes. Each entry declares the services fed into
# the process (pubs), the outputs compared against the logs (subs), fields to
# ignore when diffing, and optional callbacks controlling setup and cycle
# boundaries. Fetch a mutable copy via get_process_config().
CONFIGS = [
  ProcessConfig(
    proc_name="controlsd",
    pubs=[
      "can", "deviceState", "pandaStates", "peripheralState", "liveCalibration", "driverMonitoringState",
      "longitudinalPlan", "liveLocationKalman", "liveParameters", "radarState",
      "modelV2", "driverCameraState", "roadCameraState", "wideRoadCameraState", "managerState",
      "testJoystick", "liveTorqueParameters", "accelerometer", "gyroscope"
    ],
    subs=["controlsState", "carState", "carControl", "sendcan", "onroadEvents", "carParams"],
    ignore=["logMonoTime", "controlsState.startMonoTime", "controlsState.cumLagMs"],
    config_callback=controlsd_config_callback,
    init_callback=controlsd_fingerprint_callback,
    should_recv_callback=controlsd_rcv_callback,
    tolerance=NUMPY_TOLERANCE,
    processing_time=0.004,
    main_pub="can",
  ),
  ProcessConfig(
    proc_name="radard",
    pubs=["can", "carState", "modelV2"],
    subs=["radarState", "liveTracks"],
    ignore=["logMonoTime", "radarState.cumLagMs"],
    init_callback=get_car_params_callback,
    should_recv_callback=MessageBasedRcvCallback("can"),
    main_pub="can",
  ),
  ProcessConfig(
    proc_name="plannerd",
    pubs=["modelV2", "carControl", "carState", "controlsState", "radarState"],
    subs=["longitudinalPlan", "uiPlan"],
    ignore=["logMonoTime", "longitudinalPlan.processingDelay", "longitudinalPlan.solverExecutionTime"],
    init_callback=get_car_params_callback,
    should_recv_callback=FrequencyBasedRcvCallback("modelV2"),
    tolerance=NUMPY_TOLERANCE,
  ),
  ProcessConfig(
    proc_name="calibrationd",
    pubs=["carState", "cameraOdometry", "carParams"],
    subs=["liveCalibration"],
    ignore=["logMonoTime"],
    should_recv_callback=calibration_rcv_callback,
  ),
  ProcessConfig(
    proc_name="dmonitoringd",
    pubs=["driverStateV2", "liveCalibration", "carState", "modelV2", "controlsState"],
    subs=["driverMonitoringState"],
    ignore=["logMonoTime"],
    should_recv_callback=FrequencyBasedRcvCallback("driverStateV2"),
    tolerance=NUMPY_TOLERANCE,
  ),
  ProcessConfig(
    proc_name="locationd",
    pubs=[
      "cameraOdometry", "accelerometer", "gyroscope", "gpsLocationExternal",
      "liveCalibration", "carState", "gpsLocation"
    ],
    subs=["liveLocationKalman"],
    ignore=["logMonoTime"],
    # drops whichever GPS pub is unused for the route's receiver
    config_callback=locationd_config_pubsub_callback,
    tolerance=NUMPY_TOLERANCE,
  ),
  ProcessConfig(
    proc_name="paramsd",
    pubs=["liveLocationKalman", "carState"],
    subs=["liveParameters"],
    ignore=["logMonoTime"],
    init_callback=get_car_params_callback,
    should_recv_callback=FrequencyBasedRcvCallback("liveLocationKalman"),
    tolerance=NUMPY_TOLERANCE,
    processing_time=0.004,
  ),
  ProcessConfig(
    proc_name="ubloxd",
    pubs=["ubloxRaw"],
    subs=["ubloxGnss", "gpsLocationExternal"],
    ignore=["logMonoTime"],
  ),
  ProcessConfig(
    proc_name="torqued",
    pubs=["liveLocationKalman", "carState", "carControl"],
    subs=["liveTorqueParameters"],
    ignore=["logMonoTime"],
    init_callback=get_car_params_callback,
    should_recv_callback=torqued_rcv_callback,
    tolerance=NUMPY_TOLERANCE,
  ),
  ProcessConfig(
    proc_name="modeld",
    pubs=["roadCameraState", "wideRoadCameraState", "liveCalibration", "driverMonitoringState"],
    subs=["modelV2", "cameraOdometry"],
    ignore=["logMonoTime", "modelV2.frameDropPerc", "modelV2.modelExecutionTime"],
    should_recv_callback=ModeldCameraSyncRcvCallback(),
    tolerance=NUMPY_TOLERANCE,
    processing_time=0.020,
    # modeld is driven by the VisionIPC road camera stream, not a msgq service
    main_pub=vipc_get_endpoint_name("camerad", meta_from_camera_state("roadCameraState").stream),
    main_pub_drained=False,
    vision_pubs=["roadCameraState", "wideRoadCameraState"],
    ignore_alive_pubs=["wideRoadCameraState"],
    init_callback=get_car_params_callback,
  ),
  ProcessConfig(
    proc_name="dmonitoringmodeld",
    pubs=["liveCalibration", "driverCameraState"],
    subs=["driverStateV2"],
    ignore=["logMonoTime", "driverStateV2.modelExecutionTime", "driverStateV2.dspExecutionTime"],
    should_recv_callback=dmonitoringmodeld_rcv_callback,
    tolerance=NUMPY_TOLERANCE,
    processing_time=0.020,
    main_pub=vipc_get_endpoint_name("camerad", meta_from_camera_state("driverCameraState").stream),
    main_pub_drained=False,
    vision_pubs=["driverCameraState"],
    ignore_alive_pubs=["driverCameraState"],
  ),
]
|
||||
|
||||
|
||||
def get_process_config(name: str) -> ProcessConfig:
  """Return a deep copy of the ProcessConfig registered under `name`."""
  try:
    matching = next(c for c in CONFIGS if c.proc_name == name)
  except StopIteration as ex:
    raise Exception(f"Cannot find process config with name: {name}") from ex
  return copy.deepcopy(matching)
|
||||
|
||||
def get_custom_params_from_lr(lr: "LogIterable", initial_state: str = "first") -> dict[str, Any]:
  """
  Use this to get custom params dict based on provided logs.
  Useful when replaying following processes: calibrationd, paramsd, torqued
  The params may be based on first or last message of given type (carParams, liveCalibration, liveParameters, liveTorqueParameters) in the logs.
  """
  assert initial_state in ["first", "last"]

  # collect all relevant message types in a single pass, so this also works
  # when lr is a one-shot iterator/generator (the old four separate
  # comprehensions would silently see an exhausted iterable after the first)
  wanted = ("carParams", "liveCalibration", "liveParameters", "liveTorqueParameters")
  msgs: dict[str, list] = {w: [] for w in wanted}
  for m in lr:
    which = m.which()
    if which in msgs:
      msgs[which].append(m)

  msg_index = 0 if initial_state == "first" else -1

  assert len(msgs["carParams"]) > 0, "carParams required for initial state of liveParameters and CarParamsPrevRoute"
  CP = msgs["carParams"][msg_index].carParams

  custom_params = {
    "CarParamsPrevRoute": CP.as_builder().to_bytes()
  }

  if len(msgs["liveCalibration"]) > 0:
    custom_params["CalibrationParams"] = msgs["liveCalibration"][msg_index].as_builder().to_bytes()
  if len(msgs["liveParameters"]) > 0:
    lp_dict = msgs["liveParameters"][msg_index].to_dict()
    # fingerprint must match the replayed car, or paramsd rejects the cache
    lp_dict["carFingerprint"] = CP.carFingerprint
    custom_params["LiveParameters"] = json.dumps(lp_dict)
  if len(msgs["liveTorqueParameters"]) > 0:
    custom_params["LiveTorqueParameters"] = msgs["liveTorqueParameters"][msg_index].as_builder().to_bytes()

  return custom_params
|
||||
|
||||
def replay_process_with_name(name: str | Iterable[str], lr: LogIterable, *args, **kwargs) -> list[capnp._DynamicStructReader]:
  """Replay one or more processes selected by name; see replay_process for kwargs."""
  if isinstance(name, str):
    names = [name]
  elif isinstance(name, Iterable):
    names = list(name)
  else:
    raise ValueError("name must be str or collections of strings")

  cfgs = [get_process_config(n) for n in names]
  return replay_process(cfgs, lr, *args, **kwargs)
|
||||
|
||||
def replay_process(
  cfg: ProcessConfig | Iterable[ProcessConfig], lr: LogIterable, frs: dict[str, BaseFrameReader] | None = None,
  fingerprint: str | None = None, return_all_logs: bool = False, custom_params: dict[str, Any] | None = None,
  captured_output_store: dict[str, dict[str, str]] | None = None, disable_progress: bool = False
) -> list[capnp._DynamicStructReader]:
  """Replay logs through one or more openpilot processes and collect their output.

  Args:
    cfg: a single ProcessConfig or an iterable of them.
    lr: source log messages.
    frs: frame readers keyed by camera state name; required for vision processes.
    fingerprint: car fingerprint to use instead of fingerprinting from logs.
    return_all_logs: merge process output back into the full log stream,
      replacing the original messages of the same types.
    custom_params: extra Params to seed before starting the processes.
    captured_output_store: if given, filled with each process's stdout/stderr.
    disable_progress: hide the tqdm progress bar.
  """
  if isinstance(cfg, Iterable):
    cfgs = list(cfg)
  else:
    cfgs = [cfg]

  # migrate old log formats; only synthesize panda/camera states when some process needs them
  all_msgs = migrate_all(lr, old_logtime=True,
                         manager_states=True,
                         panda_states=any("pandaStates" in cfg.pubs for cfg in cfgs),
                         camera_states=any(len(cfg.vision_pubs) != 0 for cfg in cfgs))
  process_logs = _replay_multi_process(cfgs, all_msgs, frs, fingerprint, custom_params, captured_output_store, disable_progress)

  if return_all_logs:
    # replace logged messages of replayed types with the fresh process output
    keys = {m.which() for m in process_logs}
    modified_logs = [m for m in all_msgs if m.which() not in keys]
    modified_logs.extend(process_logs)
    modified_logs.sort(key=lambda m: int(m.logMonoTime))
    log_msgs = modified_logs
  else:
    log_msgs = process_logs

  return log_msgs
|
||||
|
||||
def _replay_multi_process(
  cfgs: list[ProcessConfig], lr: LogIterable, frs: dict[str, BaseFrameReader] | None, fingerprint: str | None,
  custom_params: dict[str, Any] | None, captured_output_store: dict[str, dict[str, str]] | None, disable_progress: bool
) -> list[capnp._DynamicStructReader]:
  """Core replay loop: start one container per config and feed messages in time order.

  Messages from the logs and messages freshly produced by replayed processes are
  interleaved by logMonoTime, so downstream processes consume upstream output
  instead of the stale logged copies.
  """
  if fingerprint is not None:
    params_config = generate_params_config(lr=lr, fingerprint=fingerprint, custom_params=custom_params)
    env_config = generate_environ_config(fingerprint=fingerprint)
  else:
    # fall back to the CarParams captured in the logs (may be None)
    CP = next((m.carParams for m in lr if m.which() == "carParams"), None)
    params_config = generate_params_config(lr=lr, CP=CP, custom_params=custom_params)
    env_config = generate_environ_config(CP=CP)

  # validate frs and vision pubs
  all_vision_pubs = [pub for cfg in cfgs for pub in cfg.vision_pubs]
  if len(all_vision_pubs) != 0:
    assert frs is not None, "frs must be provided when replaying process using vision streams"
    assert all(meta_from_camera_state(st) is not None for st in all_vision_pubs), \
      f"undefined vision stream spotted, probably misconfigured process: (vision pubs: {all_vision_pubs})"
    required_vision_pubs = {m.camera_state for m in available_streams(lr)} & set(all_vision_pubs)
    assert all(st in frs for st in required_vision_pubs), f"frs for this process must contain following vision streams: {required_vision_pubs}"

  all_msgs = sorted(lr, key=lambda msg: msg.logMonoTime)
  log_msgs = []
  try:
    containers = []
    for cfg in cfgs:
      container = ProcessContainer(cfg)
      containers.append(container)
      container.start(params_config, env_config, all_msgs, frs, fingerprint, captured_output_store is not None)

    all_pubs = {pub for container in containers for pub in container.pubs}
    all_subs = {sub for container in containers for sub in container.subs}
    # pubs no replayed process produces itself must come from the logs
    lr_pubs = all_pubs - all_subs
    pubs_to_containers = {pub: [container for container in containers if pub in container.pubs] for pub in all_pubs}

    pub_msgs = [msg for msg in all_msgs if msg.which() in lr_pubs]
    # external queue for messages taken from logs; internal queue for messages generated by processes, which will be republished
    external_pub_queue: list[capnp._DynamicStructReader] = pub_msgs.copy()
    internal_pub_queue: list[capnp._DynamicStructReader] = []
    # heap for maintaining the order of messages generated by processes, where each element: (logMonoTime, index in internal_pub_queue)
    internal_pub_index_heap: list[tuple[int, int]] = []

    pbar = tqdm(total=len(external_pub_queue), disable=disable_progress)
    while len(external_pub_queue) != 0 or (len(internal_pub_index_heap) != 0 and not all(c.has_empty_queue for c in containers)):
      # pick whichever source has the earliest message
      if len(internal_pub_index_heap) == 0 or (len(external_pub_queue) != 0 and external_pub_queue[0].logMonoTime < internal_pub_index_heap[0][0]):
        msg = external_pub_queue.pop(0)
        pbar.update(1)
      else:
        _, index = heapq.heappop(internal_pub_index_heap)
        msg = internal_pub_queue[index]

      target_containers = pubs_to_containers[msg.which()]
      for container in target_containers:
        output_msgs = container.run_step(msg, frs)
        for m in output_msgs:
          # republish process output that other processes subscribe to
          if m.which() in all_pubs:
            internal_pub_queue.append(m)
            heapq.heappush(internal_pub_index_heap, (m.logMonoTime, len(internal_pub_queue) - 1))
        log_msgs.extend(output_msgs)
  finally:
    # always tear down processes, even when replay fails mid-way
    for container in containers:
      container.stop()
      if captured_output_store is not None:
        assert container.capture is not None
        out, err = container.capture.read_outerr()
        captured_output_store[container.cfg.proc_name] = {"out": out, "err": err}

  return log_msgs
|
||||
|
||||
def generate_params_config(lr=None, CP=None, fingerprint=None, custom_params=None) -> dict[str, Any]:
  """Build the Params key/value dict used to seed a replay.

  Base toggles are extended with caller-provided custom params, settings
  derived from the logs (ublox presence, RHD detection), and settings derived
  from CarParams when available.
  """
  params_dict: dict[str, Any] = {
    "OpenpilotEnabledToggle": True,
    "DisengageOnAccelerator": True,
    "DisableLogging": False,
  }

  if custom_params is not None:
    params_dict.update(custom_params)

  if lr is not None:
    params_dict["UbloxAvailable"] = any(msg.which() == "ubloxGnss" for msg in lr)
    rhd_states = (msg.driverMonitoringState.isRHD for msg in lr if msg.which() == "driverMonitoringState")
    params_dict["IsRhdDetected"] = next(rhd_states, False)

  if CP is not None:
    if CP.alternativeExperience == ALTERNATIVE_EXPERIENCE.DISABLE_DISENGAGE_ON_GAS:
      params_dict["DisengageOnAccelerator"] = False

    # FW-based fingerprints need the cached CarParams unless an explicit fingerprint is forced
    if fingerprint is None and CP.fingerprintSource == "fw":
      params_dict["CarParamsCache"] = CP.as_builder().to_bytes()

    if CP.openpilotLongitudinalControl:
      params_dict["ExperimentalLongitudinalEnabled"] = True

    if CP.notCar:
      params_dict["JoystickDebugMode"] = True

  return params_dict
|
||||
|
||||
def generate_environ_config(CP=None, fingerprint=None, log_dir=None) -> dict[str, Any]:
  """Build the environment variable dict used to launch replayed processes."""
  environ_dict = {}
  # /dev/shm is Linux-only; macOS falls back to the default params root
  if platform.system() != "Darwin":
    environ_dict["PARAMS_ROOT"] = "/dev/shm/params"
  if log_dir is not None:
    environ_dict["LOG_ROOT"] = log_dir

  environ_dict["REPLAY"] = "1"

  # Regen or python process
  if fingerprint is not None:
    # forced fingerprint: skip FW query entirely
    skip_fw, fp = "1", fingerprint
  elif CP is not None:
    if CP.fingerprintSource == "fw":
      skip_fw, fp = "", ""
    else:
      skip_fw, fp = "1", CP.carFingerprint
  else:
    skip_fw, fp = "", ""

  environ_dict["SKIP_FW_QUERY"] = skip_fw
  environ_dict["FINGERPRINT"] = fp

  return environ_dict
|
||||
|
||||
def check_openpilot_enabled(msgs: LogIterable) -> bool:
  """Return True if openpilot stayed active for >10s straight (notCar platforms always pass)."""
  cur_streak = 0
  best_streak = 0
  for msg in msgs:
    which = msg.which()
    if which == "carParams":
      if msg.carParams.notCar:
        return True
    elif which == "controlsState":
      # track the longest run of consecutive active controlsState msgs
      cur_streak = cur_streak + 1 if msg.controlsState.active else 0
      best_streak = max(best_streak, cur_streak)

  return best_streak > int(10. / DT_CTRL)
1
selfdrive/test/process_replay/ref_commit
Normal file
1
selfdrive/test/process_replay/ref_commit
Normal file
@@ -0,0 +1 @@
|
||||
43efe1cf08cba8c86bc1ae8234b3d3d084a40e5d
|
||||
158
selfdrive/test/process_replay/regen.py
Normal file
158
selfdrive/test/process_replay/regen.py
Normal file
@@ -0,0 +1,158 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import argparse
|
||||
import time
|
||||
import capnp
|
||||
import numpy as np
|
||||
|
||||
from typing import Any
|
||||
from collections.abc import Iterable
|
||||
|
||||
from openpilot.selfdrive.test.process_replay.process_replay import CONFIGS, FAKEDATA, ProcessConfig, replay_process, get_process_config, \
|
||||
check_openpilot_enabled, get_custom_params_from_lr
|
||||
from openpilot.selfdrive.test.process_replay.vision_meta import DRIVER_FRAME_SIZES
|
||||
from openpilot.selfdrive.test.update_ci_routes import upload_route
|
||||
from openpilot.tools.lib.route import Route
|
||||
from openpilot.tools.lib.framereader import FrameReader, BaseFrameReader, FrameType
|
||||
from openpilot.tools.lib.logreader import LogReader, LogIterable
|
||||
from openpilot.tools.lib.helpers import save_log
|
||||
|
||||
|
||||
class DummyFrameReader(BaseFrameReader):
  """Frame reader returning constant-valued frames; stands in for a real camera stream."""

  def __init__(self, w: int, h: int, frame_count: int, pix_val: int):
    self.pix_val = pix_val
    self.w, self.h = w, h
    self.frame_count = frame_count
    self.frame_type = FrameType.raw

  def get(self, idx, count=1, pix_fmt="yuv420p"):
    if pix_fmt == "rgb24":
      shape = (self.h, self.w, 3)
    elif pix_fmt in ("nv12", "yuv420p"):
      # planar/semi-planar 4:2:0: 1.5 bytes per pixel, flat buffer
      shape = (int((self.h * self.w) * 3 / 2),)
    else:
      raise NotImplementedError

    return [np.full(shape, self.pix_val, dtype=np.uint8) for _ in range(count)]

  @staticmethod
  def zero_dcamera():
    """A black tici driver-camera stream, 1200 frames long."""
    return DummyFrameReader(*DRIVER_FRAME_SIZES["tici"], 1200, 0)
|
||||
|
||||
def regen_segment(
  lr: LogIterable, frs: dict[str, Any] | None = None,
  processes: Iterable[ProcessConfig] = CONFIGS, disable_tqdm: bool = False
) -> list[capnp._DynamicStructReader]:
  """Regenerate a segment by replaying `processes` over the given logs.

  Returns the full merged log stream (original messages plus the replayed
  processes' fresh output).
  """
  all_msgs = sorted(lr, key=lambda m: m.logMonoTime)
  # seed calibration/torque/params state from the source logs
  custom_params = get_custom_params_from_lr(all_msgs)

  print("Replayed processes:", [p.proc_name for p in processes])
  print("\n\n", "*"*30, "\n\n", sep="")

  output_logs = replay_process(processes, all_msgs, frs, return_all_logs=True, custom_params=custom_params, disable_progress=disable_tqdm)

  return output_logs
|
||||
|
||||
def setup_data_readers(
  route: str, sidx: int, use_route_meta: bool,
  needs_driver_cam: bool = True, needs_road_cam: bool = True, dummy_driver_cam: bool = False
) -> tuple[LogReader, dict[str, Any]]:
  """Open the log reader and camera frame readers for one segment of a route.

  With use_route_meta, paths are resolved through the Route API; otherwise
  files are fetched from the comma CI bucket ("cd:/" URLs). Only the cameras
  actually needed are opened; the driver camera can be replaced by a dummy
  black stream.
  """
  if use_route_meta:
    r = Route(route)
    lr = LogReader(r.log_paths()[sidx])
    frs = {}
    if needs_road_cam and len(r.camera_paths()) > sidx and r.camera_paths()[sidx] is not None:
      frs['roadCameraState'] = FrameReader(r.camera_paths()[sidx])
    if needs_road_cam and len(r.ecamera_paths()) > sidx and r.ecamera_paths()[sidx] is not None:
      frs['wideRoadCameraState'] = FrameReader(r.ecamera_paths()[sidx])
    if needs_driver_cam:
      if dummy_driver_cam:
        frs['driverCameraState'] = DummyFrameReader.zero_dcamera()
      elif len(r.dcamera_paths()) > sidx and r.dcamera_paths()[sidx] is not None:
        device_type = next(str(msg.initData.deviceType) for msg in lr if msg.which() == "initData")
        assert device_type != "neo", "Driver camera not supported on neo segments. Use dummy dcamera."
        frs['driverCameraState'] = FrameReader(r.dcamera_paths()[sidx])
  else:
    lr = LogReader(f"cd:/{route.replace('|', '/')}/{sidx}/rlog.bz2")
    frs = {}
    if needs_road_cam:
      frs['roadCameraState'] = FrameReader(f"cd:/{route.replace('|', '/')}/{sidx}/fcamera.hevc")
      # only fetch the wide camera if the logs actually contain its states
      if next((True for m in lr if m.which() == "wideRoadCameraState"), False):
        frs['wideRoadCameraState'] = FrameReader(f"cd:/{route.replace('|', '/')}/{sidx}/ecamera.hevc")
    if needs_driver_cam:
      if dummy_driver_cam:
        frs['driverCameraState'] = DummyFrameReader.zero_dcamera()
      else:
        device_type = next(str(msg.initData.deviceType) for msg in lr if msg.which() == "initData")
        assert device_type != "neo", "Driver camera not supported on neo segments. Use dummy dcamera."
        frs['driverCameraState'] = FrameReader(f"cd:/{route.replace('|', '/')}/{sidx}/dcamera.hevc")

  return lr, frs
|
||||
|
||||
def regen_and_save(
  route: str, sidx: int, processes: str | Iterable[str] = "all", outdir: str = FAKEDATA,
  upload: bool = False, use_route_meta: bool = False, disable_tqdm: bool = False, dummy_driver_cam: bool = False
) -> str:
  """Regenerate one segment, save it as a new fake route, and optionally upload it.

  Returns the relative path of the new segment directory. Raises if openpilot
  did not stay engaged long enough in the regenerated logs.
  """
  if not isinstance(processes, str) and not hasattr(processes, "__iter__"):
    raise ValueError("whitelist_proc must be a string or iterable")

  if processes != "all":
    # only "all" is accepted as a string; anything else must be an iterable of names
    if isinstance(processes, str):
      raise ValueError(f"Invalid value for processes: {processes}")

    replayed_processes = []
    for d in processes:
      cfg = get_process_config(d)
      replayed_processes.append(cfg)
  else:
    replayed_processes = CONFIGS

  # only fetch the cameras the selected processes actually consume
  all_vision_pubs = {pub for cfg in replayed_processes for pub in cfg.vision_pubs}
  lr, frs = setup_data_readers(route, sidx, use_route_meta,
                               needs_driver_cam="driverCameraState" in all_vision_pubs,
                               needs_road_cam="roadCameraState" in all_vision_pubs or "wideRoadCameraState" in all_vision_pubs,
                               dummy_driver_cam=dummy_driver_cam)
  output_logs = regen_segment(lr, frs, replayed_processes, disable_tqdm=disable_tqdm)

  # new segment dir named by current UTC time, always segment index 0
  log_dir = os.path.join(outdir, time.strftime("%Y-%m-%d--%H-%M-%S--0", time.gmtime()))
  rel_log_dir = os.path.relpath(log_dir)
  rpath = os.path.join(log_dir, "rlog.bz2")

  os.makedirs(log_dir)
  save_log(rpath, output_logs, compress=True)

  print("\n\n", "*"*30, "\n\n", sep="")
  print("New route:", rel_log_dir, "\n")

  if not check_openpilot_enabled(output_logs):
    raise Exception("Route did not engage for long enough")

  if upload:
    upload_route(rel_log_dir)

  return rel_log_dir
|
||||
|
||||
if __name__ == "__main__":
  def comma_separated_list(string):
    # argparse type helper: "a,b,c" -> ["a", "b", "c"]
    return string.split(",")

  all_procs = [p.proc_name for p in CONFIGS]
  parser = argparse.ArgumentParser(description="Generate new segments from old ones")
  parser.add_argument("--upload", action="store_true", help="Upload the new segment to the CI bucket")
  parser.add_argument("--outdir", help="log output dir", default=FAKEDATA)
  parser.add_argument("--dummy-dcamera", action='store_true', help="Use dummy blank driver camera")
  parser.add_argument("--whitelist-procs", type=comma_separated_list, default=all_procs,
                      help="Comma-separated whitelist of processes to regen (e.g. controlsd,radard)")
  parser.add_argument("--blacklist-procs", type=comma_separated_list, default=[],
                      help="Comma-separated blacklist of processes to regen (e.g. controlsd,radard)")
  parser.add_argument("route", type=str, help="The source route")
  parser.add_argument("seg", type=int, help="Segment in source route")
  args = parser.parse_args()

  # effective process set = whitelist minus blacklist
  blacklist_set = set(args.blacklist_procs)
  processes = [p for p in args.whitelist_procs if p not in blacklist_set]
  regen_and_save(args.route, args.seg, processes=processes, upload=args.upload, outdir=args.outdir, dummy_driver_cam=args.dummy_dcamera)
||||
54
selfdrive/test/process_replay/regen_all.py
Normal file
54
selfdrive/test/process_replay/regen_all.py
Normal file
@@ -0,0 +1,54 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import concurrent.futures
|
||||
import os
|
||||
import random
|
||||
import traceback
|
||||
from tqdm import tqdm
|
||||
|
||||
from openpilot.common.prefix import OpenpilotPrefix
|
||||
from openpilot.selfdrive.test.process_replay.regen import regen_and_save
|
||||
from openpilot.selfdrive.test.process_replay.test_processes import FAKEDATA, source_segments as segments
|
||||
from openpilot.tools.lib.route import SegmentName
|
||||
|
||||
|
||||
def regen_job(segment, upload, disable_tqdm):
|
||||
with OpenpilotPrefix():
|
||||
sn = SegmentName(segment[1])
|
||||
fake_dongle_id = 'regen' + ''.join(random.choice('0123456789ABCDEF') for _ in range(11))
|
||||
try:
|
||||
relr = regen_and_save(sn.route_name.canonical_name, sn.segment_num, upload=upload, use_route_meta=False,
|
||||
outdir=os.path.join(FAKEDATA, fake_dongle_id), disable_tqdm=disable_tqdm, dummy_driver_cam=True)
|
||||
relr = '|'.join(relr.split('/')[-2:])
|
||||
return f' ("{segment[0]}", "{relr}"), '
|
||||
except Exception as e:
|
||||
err = f" {segment} failed: {str(e)}"
|
||||
err += traceback.format_exc()
|
||||
err += "\n\n"
|
||||
return err
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
all_cars = {car for car, _ in segments}
|
||||
|
||||
parser = argparse.ArgumentParser(description="Generate new segments from old ones")
|
||||
parser.add_argument("-j", "--jobs", type=int, default=1)
|
||||
parser.add_argument("--no-upload", action="store_true")
|
||||
parser.add_argument("--whitelist-cars", type=str, nargs="*", default=all_cars,
|
||||
help="Whitelist given cars from the test (e.g. HONDA)")
|
||||
parser.add_argument("--blacklist-cars", type=str, nargs="*", default=[],
|
||||
help="Blacklist given cars from the test (e.g. HONDA)")
|
||||
args = parser.parse_args()
|
||||
|
||||
tested_cars = set(args.whitelist_cars) - set(args.blacklist_cars)
|
||||
tested_cars = {c.upper() for c in tested_cars}
|
||||
tested_segments = [(car, segment) for car, segment in segments if car in tested_cars]
|
||||
|
||||
with concurrent.futures.ProcessPoolExecutor(max_workers=args.jobs) as pool:
|
||||
p = pool.map(regen_job, tested_segments, [not args.no_upload] * len(tested_segments), [args.jobs > 1] * len(tested_segments))
|
||||
msg = "Copy these new segments into test_processes.py:"
|
||||
for seg in tqdm(p, desc="Generating segments", total=len(tested_segments)):
|
||||
msg += "\n" + str(seg)
|
||||
print()
|
||||
print()
|
||||
print(msg)
|
||||
196
selfdrive/test/process_replay/test_debayer.py
Normal file
196
selfdrive/test/process_replay/test_debayer.py
Normal file
@@ -0,0 +1,196 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
import bz2
|
||||
import numpy as np
|
||||
|
||||
import pyopencl as cl # install with `PYOPENCL_CL_PRETEND_VERSION=2.0 pip install pyopencl`
|
||||
|
||||
from openpilot.system.hardware import PC, TICI
|
||||
from openpilot.common.basedir import BASEDIR
|
||||
from openpilot.tools.lib.openpilotci import BASE_URL
|
||||
from openpilot.system.version import get_commit
|
||||
from openpilot.system.camerad.snapshot.snapshot import yuv_to_rgb
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
from openpilot.tools.lib.filereader import FileReader
|
||||
|
||||
TEST_ROUTE = "8345e3b82948d454|2022-05-04--13-45-33/0"
|
||||
|
||||
FRAME_WIDTH = 1928
|
||||
FRAME_HEIGHT = 1208
|
||||
FRAME_STRIDE = 2896
|
||||
|
||||
UV_WIDTH = FRAME_WIDTH // 2
|
||||
UV_HEIGHT = FRAME_HEIGHT // 2
|
||||
UV_SIZE = UV_WIDTH * UV_HEIGHT
|
||||
|
||||
|
||||
def get_frame_fn(ref_commit, test_route, tici=True):
|
||||
return f"{test_route}_debayer{'_tici' if tici else ''}_{ref_commit}.bz2"
|
||||
|
||||
|
||||
def bzip_frames(frames):
|
||||
data = b''
|
||||
for y, u, v in frames:
|
||||
data += y.tobytes()
|
||||
data += u.tobytes()
|
||||
data += v.tobytes()
|
||||
return bz2.compress(data)
|
||||
|
||||
|
||||
def unbzip_frames(url):
|
||||
with FileReader(url) as f:
|
||||
dat = f.read()
|
||||
|
||||
data = bz2.decompress(dat)
|
||||
|
||||
res = []
|
||||
for y_start in range(0, len(data), FRAME_WIDTH * FRAME_HEIGHT + UV_SIZE * 2):
|
||||
u_start = y_start + FRAME_WIDTH * FRAME_HEIGHT
|
||||
v_start = u_start + UV_SIZE
|
||||
|
||||
y = np.frombuffer(data[y_start: u_start], dtype=np.uint8).reshape((FRAME_HEIGHT, FRAME_WIDTH))
|
||||
u = np.frombuffer(data[u_start: v_start], dtype=np.uint8).reshape((UV_HEIGHT, UV_WIDTH))
|
||||
v = np.frombuffer(data[v_start: v_start + UV_SIZE], dtype=np.uint8).reshape((UV_HEIGHT, UV_WIDTH))
|
||||
|
||||
res.append((y, u, v))
|
||||
|
||||
return res
|
||||
|
||||
|
||||
def init_kernels(frame_offset=0):
|
||||
ctx = cl.create_some_context(interactive=False)
|
||||
|
||||
with open(os.path.join(BASEDIR, 'system/camerad/cameras/real_debayer.cl')) as f:
|
||||
build_args = ' -cl-fast-relaxed-math -cl-denorms-are-zero -cl-single-precision-constant' + \
|
||||
f' -DFRAME_STRIDE={FRAME_STRIDE} -DRGB_WIDTH={FRAME_WIDTH} -DRGB_HEIGHT={FRAME_HEIGHT} -DFRAME_OFFSET={frame_offset} -DCAM_NUM=0'
|
||||
if PC:
|
||||
build_args += ' -DHALF_AS_FLOAT=1 -cl-std=CL2.0'
|
||||
debayer_prg = cl.Program(ctx, f.read()).build(options=build_args)
|
||||
|
||||
return ctx, debayer_prg
|
||||
|
||||
def debayer_frame(ctx, debayer_prg, data, rgb=False):
|
||||
q = cl.CommandQueue(ctx)
|
||||
|
||||
yuv_buff = np.empty(FRAME_WIDTH * FRAME_HEIGHT + UV_SIZE * 2, dtype=np.uint8)
|
||||
|
||||
cam_g = cl.Buffer(ctx, cl.mem_flags.READ_ONLY | cl.mem_flags.COPY_HOST_PTR, hostbuf=data)
|
||||
yuv_g = cl.Buffer(ctx, cl.mem_flags.WRITE_ONLY, FRAME_WIDTH * FRAME_HEIGHT + UV_SIZE * 2)
|
||||
|
||||
local_worksize = (20, 20) if TICI else (4, 4)
|
||||
ev1 = debayer_prg.debayer10(q, (UV_WIDTH, UV_HEIGHT), local_worksize, cam_g, yuv_g)
|
||||
cl.enqueue_copy(q, yuv_buff, yuv_g, wait_for=[ev1]).wait()
|
||||
cl.enqueue_barrier(q)
|
||||
|
||||
y = yuv_buff[:FRAME_WIDTH*FRAME_HEIGHT].reshape((FRAME_HEIGHT, FRAME_WIDTH))
|
||||
u = yuv_buff[FRAME_WIDTH*FRAME_HEIGHT:FRAME_WIDTH*FRAME_HEIGHT+UV_SIZE].reshape((UV_HEIGHT, UV_WIDTH))
|
||||
v = yuv_buff[FRAME_WIDTH*FRAME_HEIGHT+UV_SIZE:].reshape((UV_HEIGHT, UV_WIDTH))
|
||||
|
||||
if rgb:
|
||||
return yuv_to_rgb(y, u, v)
|
||||
else:
|
||||
return y, u, v
|
||||
|
||||
|
||||
def debayer_replay(lr):
|
||||
ctx, debayer_prg = init_kernels()
|
||||
|
||||
frames = []
|
||||
for m in lr:
|
||||
if m.which() == 'roadCameraState':
|
||||
cs = m.roadCameraState
|
||||
if cs.image:
|
||||
data = np.frombuffer(cs.image, dtype=np.uint8)
|
||||
img = debayer_frame(ctx, debayer_prg, data)
|
||||
|
||||
frames.append(img)
|
||||
|
||||
return frames
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
update = "--update" in sys.argv
|
||||
replay_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
ref_commit_fn = os.path.join(replay_dir, "debayer_replay_ref_commit")
|
||||
|
||||
# load logs
|
||||
lr = list(LogReader(TEST_ROUTE))
|
||||
|
||||
# run replay
|
||||
frames = debayer_replay(lr)
|
||||
|
||||
# get diff
|
||||
failed = False
|
||||
diff = ''
|
||||
yuv_i = ['y', 'u', 'v']
|
||||
if not update:
|
||||
with open(ref_commit_fn) as f:
|
||||
ref_commit = f.read().strip()
|
||||
frame_fn = get_frame_fn(ref_commit, TEST_ROUTE, tici=TICI)
|
||||
|
||||
try:
|
||||
cmp_frames = unbzip_frames(BASE_URL + frame_fn)
|
||||
|
||||
if len(frames) != len(cmp_frames):
|
||||
failed = True
|
||||
diff += 'amount of frames not equal\n'
|
||||
|
||||
for i, (frame, cmp_frame) in enumerate(zip(frames, cmp_frames, strict=True)):
|
||||
for j in range(3):
|
||||
fr = frame[j]
|
||||
cmp_f = cmp_frame[j]
|
||||
if fr.shape != cmp_f.shape:
|
||||
failed = True
|
||||
diff += f'frame shapes not equal for ({i}, {yuv_i[j]})\n'
|
||||
diff += f'{ref_commit}: {cmp_f.shape}\n'
|
||||
diff += f'HEAD: {fr.shape}\n'
|
||||
elif not np.array_equal(fr, cmp_f):
|
||||
failed = True
|
||||
if np.allclose(fr, cmp_f, atol=1):
|
||||
diff += f'frames not equal for ({i}, {yuv_i[j]}), but are all close\n'
|
||||
else:
|
||||
diff += f'frames not equal for ({i}, {yuv_i[j]})\n'
|
||||
|
||||
frame_diff = np.abs(np.subtract(fr, cmp_f))
|
||||
diff_len = len(np.nonzero(frame_diff)[0])
|
||||
if diff_len > 10000:
|
||||
diff += f'different at a large amount of pixels ({diff_len})\n'
|
||||
else:
|
||||
diff += 'different at (frame, yuv, pixel, ref, HEAD):\n'
|
||||
for k in zip(*np.nonzero(frame_diff), strict=True):
|
||||
diff += f'{i}, {yuv_i[j]}, {k}, {cmp_f[k]}, {fr[k]}\n'
|
||||
|
||||
if failed:
|
||||
print(diff)
|
||||
with open("debayer_diff.txt", "w") as f:
|
||||
f.write(diff)
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
failed = True
|
||||
|
||||
# upload new refs
|
||||
if update or (failed and TICI):
|
||||
from openpilot.tools.lib.openpilotci import upload_file
|
||||
|
||||
print("Uploading new refs")
|
||||
|
||||
frames_bzip = bzip_frames(frames)
|
||||
|
||||
new_commit = get_commit()
|
||||
frame_fn = os.path.join(replay_dir, get_frame_fn(new_commit, TEST_ROUTE, tici=TICI))
|
||||
with open(frame_fn, "wb") as f2:
|
||||
f2.write(frames_bzip)
|
||||
|
||||
try:
|
||||
upload_file(frame_fn, os.path.basename(frame_fn))
|
||||
except Exception as e:
|
||||
print("failed to upload", e)
|
||||
|
||||
if update:
|
||||
with open(ref_commit_fn, 'w') as f:
|
||||
f.write(str(new_commit))
|
||||
|
||||
print("\nNew ref commit: ", new_commit)
|
||||
|
||||
sys.exit(int(failed))
|
||||
33
selfdrive/test/process_replay/test_fuzzy.py
Normal file
33
selfdrive/test/process_replay/test_fuzzy.py
Normal file
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/env python3
|
||||
import copy
|
||||
from hypothesis import given, HealthCheck, Phase, settings
|
||||
import hypothesis.strategies as st
|
||||
from parameterized import parameterized
|
||||
import unittest
|
||||
|
||||
from cereal import log
|
||||
from openpilot.selfdrive.car.toyota.values import CAR as TOYOTA
|
||||
from openpilot.selfdrive.test.fuzzy_generation import FuzzyGenerator
|
||||
import openpilot.selfdrive.test.process_replay.process_replay as pr
|
||||
|
||||
# These processes currently fail because of unrealistic data breaking assumptions
|
||||
# that openpilot makes causing error with NaN, inf, int size, array indexing ...
|
||||
# TODO: Make each one testable
|
||||
NOT_TESTED = ['controlsd', 'plannerd', 'calibrationd', 'dmonitoringd', 'paramsd', 'dmonitoringmodeld', 'modeld']
|
||||
|
||||
TEST_CASES = [(cfg.proc_name, copy.deepcopy(cfg)) for cfg in pr.CONFIGS if cfg.proc_name not in NOT_TESTED]
|
||||
|
||||
class TestFuzzProcesses(unittest.TestCase):
|
||||
|
||||
# TODO: make this faster and increase examples
|
||||
@parameterized.expand(TEST_CASES)
|
||||
@given(st.data())
|
||||
@settings(phases=[Phase.generate, Phase.target], max_examples=10, deadline=1000, suppress_health_check=[HealthCheck.too_slow, HealthCheck.data_too_large])
|
||||
def test_fuzz_process(self, proc_name, cfg, data):
|
||||
msgs = FuzzyGenerator.get_random_event_msg(data.draw, events=cfg.pubs, real_floats=True)
|
||||
lr = [log.Event.new_message(**m).as_reader() for m in msgs]
|
||||
cfg.timeout = 5
|
||||
pr.replay_process(cfg, lr, fingerprint=TOYOTA.COROLLA_TSS2, disable_progress=True)
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
231
selfdrive/test/process_replay/test_processes.py
Normal file
231
selfdrive/test/process_replay/test_processes.py
Normal file
@@ -0,0 +1,231 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import concurrent.futures
|
||||
import os
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
from tqdm import tqdm
|
||||
from typing import Any
|
||||
|
||||
from openpilot.selfdrive.car.car_helpers import interface_names
|
||||
from openpilot.tools.lib.openpilotci import get_url, upload_file
|
||||
from openpilot.selfdrive.test.process_replay.compare_logs import compare_logs, format_diff
|
||||
from openpilot.selfdrive.test.process_replay.process_replay import CONFIGS, PROC_REPLAY_DIR, FAKEDATA, check_openpilot_enabled, replay_process
|
||||
from openpilot.system.version import get_commit
|
||||
from openpilot.tools.lib.filereader import FileReader
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
from openpilot.tools.lib.helpers import save_log
|
||||
|
||||
source_segments = [
|
||||
("BODY", "937ccb7243511b65|2022-05-24--16-03-09--1"), # COMMA.BODY
|
||||
("HYUNDAI", "02c45f73a2e5c6e9|2021-01-01--19-08-22--1"), # HYUNDAI.SONATA
|
||||
("HYUNDAI2", "d545129f3ca90f28|2022-11-07--20-43-08--3"), # HYUNDAI.KIA_EV6 (+ QCOM GPS)
|
||||
("TOYOTA", "0982d79ebb0de295|2021-01-04--17-13-21--13"), # TOYOTA.PRIUS
|
||||
("TOYOTA2", "0982d79ebb0de295|2021-01-03--20-03-36--6"), # TOYOTA.RAV4
|
||||
("TOYOTA3", "f7d7e3538cda1a2a|2021-08-16--08-55-34--6"), # TOYOTA.COROLLA_TSS2
|
||||
("HONDA", "eb140f119469d9ab|2021-06-12--10-46-24--27"), # HONDA.CIVIC (NIDEC)
|
||||
("HONDA2", "7d2244f34d1bbcda|2021-06-25--12-25-37--26"), # HONDA.ACCORD (BOSCH)
|
||||
("CHRYSLER", "4deb27de11bee626|2021-02-20--11-28-55--8"), # CHRYSLER.PACIFICA_2018_HYBRID
|
||||
("RAM", "17fc16d840fe9d21|2023-04-26--13-28-44--5"), # CHRYSLER.RAM_1500
|
||||
("SUBARU", "341dccd5359e3c97|2022-09-12--10-35-33--3"), # SUBARU.OUTBACK
|
||||
("GM", "0c58b6a25109da2b|2021-02-23--16-35-50--11"), # GM.VOLT
|
||||
("GM2", "376bf99325883932|2022-10-27--13-41-22--1"), # GM.BOLT_EUV
|
||||
("NISSAN", "35336926920f3571|2021-02-12--18-38-48--46"), # NISSAN.XTRAIL
|
||||
("VOLKSWAGEN", "de9592456ad7d144|2021-06-29--11-00-15--6"), # VOLKSWAGEN.GOLF
|
||||
("MAZDA", "bd6a637565e91581|2021-10-30--15-14-53--4"), # MAZDA.CX9_2021
|
||||
("FORD", "54827bf84c38b14f|2023-01-26--21-59-07--4"), # FORD.BRONCO_SPORT_MK1
|
||||
|
||||
# Enable when port is tested and dashcamOnly is no longer set
|
||||
#("TESLA", "bb50caf5f0945ab1|2021-06-19--17-20-18--3"), # TESLA.AP2_MODELS
|
||||
#("VOLKSWAGEN2", "3cfdec54aa035f3f|2022-07-19--23-45-10--2"), # VOLKSWAGEN.PASSAT_NMS
|
||||
]
|
||||
|
||||
segments = [
|
||||
("BODY", "regen997DF2697CB|2023-10-30--23-14-29--0"),
|
||||
("HYUNDAI", "regen2A9D2A8E0B4|2023-10-30--23-13-34--0"),
|
||||
("HYUNDAI2", "regen6CA24BC3035|2023-10-30--23-14-28--0"),
|
||||
("TOYOTA", "regen5C019D76307|2023-10-30--23-13-31--0"),
|
||||
("TOYOTA2", "regen5DCADA88A96|2023-10-30--23-14-57--0"),
|
||||
("TOYOTA3", "regen7204CA3A498|2023-10-30--23-15-55--0"),
|
||||
("HONDA", "regen048F8FA0B24|2023-10-30--23-15-53--0"),
|
||||
("HONDA2", "regen7D2D3F82D5B|2023-10-30--23-15-55--0"),
|
||||
("CHRYSLER", "regen7125C42780C|2023-10-30--23-16-21--0"),
|
||||
("RAM", "regen2731F3213D2|2023-10-30--23-18-11--0"),
|
||||
("SUBARU", "regen86E4C1B4DDD|2023-10-30--23-18-14--0"),
|
||||
("GM", "regenF6393D64745|2023-10-30--23-17-18--0"),
|
||||
("GM2", "regen220F830C05B|2023-10-30--23-18-39--0"),
|
||||
("NISSAN", "regen4F671F7C435|2023-10-30--23-18-40--0"),
|
||||
("VOLKSWAGEN", "regen8BDFE7307A0|2023-10-30--23-19-36--0"),
|
||||
("MAZDA", "regen2E9F1A15FD5|2023-10-30--23-20-36--0"),
|
||||
("FORD", "regen6D39E54606E|2023-10-30--23-20-54--0"),
|
||||
]
|
||||
|
||||
# dashcamOnly makes don't need to be tested until a full port is done
|
||||
excluded_interfaces = ["mock", "tesla"]
|
||||
|
||||
BASE_URL = "https://commadataci.blob.core.windows.net/openpilotci/"
|
||||
REF_COMMIT_FN = os.path.join(PROC_REPLAY_DIR, "ref_commit")
|
||||
EXCLUDED_PROCS = {"modeld", "dmonitoringmodeld"}
|
||||
|
||||
|
||||
def run_test_process(data):
|
||||
segment, cfg, args, cur_log_fn, ref_log_path, lr_dat = data
|
||||
res = None
|
||||
if not args.upload_only:
|
||||
lr = LogReader.from_bytes(lr_dat)
|
||||
res, log_msgs = test_process(cfg, lr, segment, ref_log_path, cur_log_fn, args.ignore_fields, args.ignore_msgs)
|
||||
# save logs so we can upload when updating refs
|
||||
save_log(cur_log_fn, log_msgs)
|
||||
|
||||
if args.update_refs or args.upload_only:
|
||||
print(f'Uploading: {os.path.basename(cur_log_fn)}')
|
||||
assert os.path.exists(cur_log_fn), f"Cannot find log to upload: {cur_log_fn}"
|
||||
upload_file(cur_log_fn, os.path.basename(cur_log_fn))
|
||||
os.remove(cur_log_fn)
|
||||
return (segment, cfg.proc_name, res)
|
||||
|
||||
|
||||
def get_log_data(segment):
|
||||
r, n = segment.rsplit("--", 1)
|
||||
with FileReader(get_url(r, n)) as f:
|
||||
return (segment, f.read())
|
||||
|
||||
|
||||
def test_process(cfg, lr, segment, ref_log_path, new_log_path, ignore_fields=None, ignore_msgs=None):
|
||||
if ignore_fields is None:
|
||||
ignore_fields = []
|
||||
if ignore_msgs is None:
|
||||
ignore_msgs = []
|
||||
|
||||
ref_log_msgs = list(LogReader(ref_log_path))
|
||||
|
||||
try:
|
||||
log_msgs = replay_process(cfg, lr, disable_progress=True)
|
||||
except Exception as e:
|
||||
raise Exception("failed on segment: " + segment) from e
|
||||
|
||||
# check to make sure openpilot is engaged in the route
|
||||
if cfg.proc_name == "controlsd":
|
||||
if not check_openpilot_enabled(log_msgs):
|
||||
# FIXME: these segments should work, but the replay enabling logic is too brittle
|
||||
if segment not in ("regen6CA24BC3035|2023-10-30--23-14-28--0", "regen7D2D3F82D5B|2023-10-30--23-15-55--0"):
|
||||
return f"Route did not enable at all or for long enough: {new_log_path}", log_msgs
|
||||
|
||||
try:
|
||||
return compare_logs(ref_log_msgs, log_msgs, ignore_fields + cfg.ignore, ignore_msgs, cfg.tolerance), log_msgs
|
||||
except Exception as e:
|
||||
return str(e), log_msgs
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
all_cars = {car for car, _ in segments}
|
||||
all_procs = {cfg.proc_name for cfg in CONFIGS if cfg.proc_name not in EXCLUDED_PROCS}
|
||||
|
||||
cpu_count = os.cpu_count() or 1
|
||||
|
||||
parser = argparse.ArgumentParser(description="Regression test to identify changes in a process's output")
|
||||
parser.add_argument("--whitelist-procs", type=str, nargs="*", default=all_procs,
|
||||
help="Whitelist given processes from the test (e.g. controlsd)")
|
||||
parser.add_argument("--whitelist-cars", type=str, nargs="*", default=all_cars,
|
||||
help="Whitelist given cars from the test (e.g. HONDA)")
|
||||
parser.add_argument("--blacklist-procs", type=str, nargs="*", default=[],
|
||||
help="Blacklist given processes from the test (e.g. controlsd)")
|
||||
parser.add_argument("--blacklist-cars", type=str, nargs="*", default=[],
|
||||
help="Blacklist given cars from the test (e.g. HONDA)")
|
||||
parser.add_argument("--ignore-fields", type=str, nargs="*", default=[],
|
||||
help="Extra fields or msgs to ignore (e.g. carState.events)")
|
||||
parser.add_argument("--ignore-msgs", type=str, nargs="*", default=[],
|
||||
help="Msgs to ignore (e.g. carEvents)")
|
||||
parser.add_argument("--update-refs", action="store_true",
|
||||
help="Updates reference logs using current commit")
|
||||
parser.add_argument("--upload-only", action="store_true",
|
||||
help="Skips testing processes and uploads logs from previous test run")
|
||||
parser.add_argument("-j", "--jobs", type=int, default=max(cpu_count - 2, 1),
|
||||
help="Max amount of parallel jobs")
|
||||
args = parser.parse_args()
|
||||
|
||||
tested_procs = set(args.whitelist_procs) - set(args.blacklist_procs)
|
||||
tested_cars = set(args.whitelist_cars) - set(args.blacklist_cars)
|
||||
tested_cars = {c.upper() for c in tested_cars}
|
||||
|
||||
full_test = (tested_procs == all_procs) and (tested_cars == all_cars) and all(len(x) == 0 for x in (args.ignore_fields, args.ignore_msgs))
|
||||
upload = args.update_refs or args.upload_only
|
||||
os.makedirs(os.path.dirname(FAKEDATA), exist_ok=True)
|
||||
|
||||
if upload:
|
||||
assert full_test, "Need to run full test when updating refs"
|
||||
|
||||
try:
|
||||
with open(REF_COMMIT_FN) as f:
|
||||
ref_commit = f.read().strip()
|
||||
except FileNotFoundError:
|
||||
print("Couldn't find reference commit")
|
||||
sys.exit(1)
|
||||
|
||||
cur_commit = get_commit()
|
||||
if not cur_commit:
|
||||
raise Exception("Couldn't get current commit")
|
||||
|
||||
print(f"***** testing against commit {ref_commit} *****")
|
||||
|
||||
# check to make sure all car brands are tested
|
||||
if full_test:
|
||||
untested = (set(interface_names) - set(excluded_interfaces)) - {c.lower() for c in tested_cars}
|
||||
assert len(untested) == 0, f"Cars missing routes: {str(untested)}"
|
||||
|
||||
log_paths: defaultdict[str, dict[str, dict[str, str]]] = defaultdict(lambda: defaultdict(dict))
|
||||
with concurrent.futures.ProcessPoolExecutor(max_workers=args.jobs) as pool:
|
||||
if not args.upload_only:
|
||||
download_segments = [seg for car, seg in segments if car in tested_cars]
|
||||
log_data: dict[str, LogReader] = {}
|
||||
p1 = pool.map(get_log_data, download_segments)
|
||||
for segment, lr in tqdm(p1, desc="Getting Logs", total=len(download_segments)):
|
||||
log_data[segment] = lr
|
||||
|
||||
pool_args: Any = []
|
||||
for car_brand, segment in segments:
|
||||
if car_brand not in tested_cars:
|
||||
continue
|
||||
|
||||
for cfg in CONFIGS:
|
||||
if cfg.proc_name not in tested_procs:
|
||||
continue
|
||||
|
||||
cur_log_fn = os.path.join(FAKEDATA, f"{segment}_{cfg.proc_name}_{cur_commit}.bz2")
|
||||
if args.update_refs: # reference logs will not exist if routes were just regenerated
|
||||
ref_log_path = get_url(*segment.rsplit("--", 1))
|
||||
else:
|
||||
ref_log_fn = os.path.join(FAKEDATA, f"{segment}_{cfg.proc_name}_{ref_commit}.bz2")
|
||||
ref_log_path = ref_log_fn if os.path.exists(ref_log_fn) else BASE_URL + os.path.basename(ref_log_fn)
|
||||
|
||||
dat = None if args.upload_only else log_data[segment]
|
||||
pool_args.append((segment, cfg, args, cur_log_fn, ref_log_path, dat))
|
||||
|
||||
log_paths[segment][cfg.proc_name]['ref'] = ref_log_path
|
||||
log_paths[segment][cfg.proc_name]['new'] = cur_log_fn
|
||||
|
||||
results: Any = defaultdict(dict)
|
||||
p2 = pool.map(run_test_process, pool_args)
|
||||
for (segment, proc, result) in tqdm(p2, desc="Running Tests", total=len(pool_args)):
|
||||
if not args.upload_only:
|
||||
results[segment][proc] = result
|
||||
|
||||
diff_short, diff_long, failed = format_diff(results, log_paths, ref_commit)
|
||||
if not upload:
|
||||
with open(os.path.join(PROC_REPLAY_DIR, "diff.txt"), "w") as f:
|
||||
f.write(diff_long)
|
||||
print(diff_short)
|
||||
|
||||
if failed:
|
||||
print("TEST FAILED")
|
||||
print("\n\nTo push the new reference logs for this commit run:")
|
||||
print("./test_processes.py --upload-only")
|
||||
else:
|
||||
print("TEST SUCCEEDED")
|
||||
|
||||
else:
|
||||
with open(REF_COMMIT_FN, "w") as f:
|
||||
f.write(cur_commit)
|
||||
print(f"\n\nUpdated reference logs for commit: {cur_commit}")
|
||||
|
||||
sys.exit(int(failed))
|
||||
45
selfdrive/test/process_replay/test_regen.py
Normal file
45
selfdrive/test/process_replay/test_regen.py
Normal file
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import unittest
|
||||
|
||||
from parameterized import parameterized
|
||||
|
||||
from openpilot.selfdrive.test.process_replay.regen import regen_segment, DummyFrameReader
|
||||
from openpilot.selfdrive.test.process_replay.process_replay import check_openpilot_enabled
|
||||
from openpilot.tools.lib.openpilotci import get_url
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
from openpilot.tools.lib.framereader import FrameReader
|
||||
|
||||
TESTED_SEGMENTS = [
|
||||
("PRIUS_C2", "0982d79ebb0de295|2021-01-04--17-13-21--13"), # TOYOTA PRIUS 2017: NEO, pandaStateDEPRECATED, no peripheralState, sensorEventsDEPRECATED
|
||||
# Enable these once regen on CI becomes faster or use them for different tests running controlsd in isolation
|
||||
# ("MAZDA_C3", "bd6a637565e91581|2021-10-30--15-14-53--4"), # MAZDA.CX9_2021: TICI, incomplete managerState
|
||||
# ("FORD_C3", "54827bf84c38b14f|2023-01-26--21-59-07--4"), # FORD.BRONCO_SPORT_MK1: TICI
|
||||
]
|
||||
|
||||
|
||||
def ci_setup_data_readers(route, sidx):
|
||||
lr = LogReader(get_url(route, sidx, "rlog"))
|
||||
frs = {
|
||||
'roadCameraState': FrameReader(get_url(route, sidx, "fcamera")),
|
||||
'driverCameraState': DummyFrameReader.zero_dcamera()
|
||||
}
|
||||
if next((True for m in lr if m.which() == "wideRoadCameraState"), False):
|
||||
frs["wideRoadCameraState"] = FrameReader(get_url(route, sidx, "ecamera"))
|
||||
|
||||
return lr, frs
|
||||
|
||||
|
||||
class TestRegen(unittest.TestCase):
|
||||
@parameterized.expand(TESTED_SEGMENTS)
|
||||
def test_engaged(self, case_name, segment):
|
||||
route, sidx = segment.rsplit("--", 1)
|
||||
lr, frs = ci_setup_data_readers(route, sidx)
|
||||
output_logs = regen_segment(lr, frs, disable_tqdm=True)
|
||||
|
||||
engaged = check_openpilot_enabled(output_logs)
|
||||
self.assertTrue(engaged, f"openpilot not engaged in {case_name}")
|
||||
|
||||
|
||||
if __name__=='__main__':
|
||||
unittest.main()
|
||||
43
selfdrive/test/process_replay/vision_meta.py
Normal file
43
selfdrive/test/process_replay/vision_meta.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from collections import namedtuple
|
||||
from cereal.visionipc import VisionStreamType
|
||||
from openpilot.common.realtime import DT_MDL, DT_DMON
|
||||
from openpilot.common.transformations.camera import tici_f_frame_size, tici_d_frame_size, tici_e_frame_size, eon_f_frame_size, eon_d_frame_size
|
||||
|
||||
VideoStreamMeta = namedtuple("VideoStreamMeta", ["camera_state", "encode_index", "stream", "dt", "frame_sizes"])
|
||||
ROAD_CAMERA_FRAME_SIZES = {"tici": tici_f_frame_size, "tizi": tici_f_frame_size, "neo": eon_f_frame_size}
|
||||
WIDE_ROAD_CAMERA_FRAME_SIZES = {"tici": tici_e_frame_size, "tizi": tici_e_frame_size}
|
||||
DRIVER_FRAME_SIZES = {"tici": tici_d_frame_size, "tizi": tici_d_frame_size, "neo": eon_d_frame_size}
|
||||
VIPC_STREAM_METADATA = [
|
||||
# metadata: (state_msg_type, encode_msg_type, stream_type, dt, frame_sizes)
|
||||
("roadCameraState", "roadEncodeIdx", VisionStreamType.VISION_STREAM_ROAD, DT_MDL, ROAD_CAMERA_FRAME_SIZES),
|
||||
("wideRoadCameraState", "wideRoadEncodeIdx", VisionStreamType.VISION_STREAM_WIDE_ROAD, DT_MDL, WIDE_ROAD_CAMERA_FRAME_SIZES),
|
||||
("driverCameraState", "driverEncodeIdx", VisionStreamType.VISION_STREAM_DRIVER, DT_DMON, DRIVER_FRAME_SIZES),
|
||||
]
|
||||
|
||||
|
||||
def meta_from_camera_state(state):
|
||||
meta = next((VideoStreamMeta(*meta) for meta in VIPC_STREAM_METADATA if meta[0] == state), None)
|
||||
return meta
|
||||
|
||||
|
||||
def meta_from_encode_index(encode_index):
|
||||
meta = next((VideoStreamMeta(*meta) for meta in VIPC_STREAM_METADATA if meta[1] == encode_index), None)
|
||||
return meta
|
||||
|
||||
|
||||
def meta_from_stream_type(stream_type):
|
||||
meta = next((VideoStreamMeta(*meta) for meta in VIPC_STREAM_METADATA if meta[2] == stream_type), None)
|
||||
return meta
|
||||
|
||||
|
||||
def available_streams(lr=None):
|
||||
if lr is None:
|
||||
return [VideoStreamMeta(*meta) for meta in VIPC_STREAM_METADATA]
|
||||
|
||||
result = []
|
||||
for meta in VIPC_STREAM_METADATA:
|
||||
has_cam_state = next((True for m in lr if m.which() == meta[0]), False)
|
||||
if has_cam_state:
|
||||
result.append(VideoStreamMeta(*meta))
|
||||
|
||||
return result
|
||||
2
selfdrive/test/profiling/.gitignore
vendored
Normal file
2
selfdrive/test/profiling/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
cachegrind.out.*
|
||||
*.prof
|
||||
0
selfdrive/test/profiling/__init__.py
Normal file
0
selfdrive/test/profiling/__init__.py
Normal file
91
selfdrive/test/profiling/lib.py
Normal file
91
selfdrive/test/profiling/lib.py
Normal file
@@ -0,0 +1,91 @@
|
||||
from collections import defaultdict, deque
|
||||
from cereal.services import SERVICE_LIST
|
||||
import cereal.messaging as messaging
|
||||
import capnp
|
||||
|
||||
|
||||
class ReplayDone(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class SubSocket():
|
||||
def __init__(self, msgs, trigger):
|
||||
self.i = 0
|
||||
self.trigger = trigger
|
||||
self.msgs = [m.as_builder().to_bytes() for m in msgs if m.which() == trigger]
|
||||
self.max_i = len(self.msgs) - 1
|
||||
|
||||
def receive(self, non_blocking=False):
|
||||
if non_blocking:
|
||||
return None
|
||||
|
||||
if self.i == self.max_i:
|
||||
raise ReplayDone
|
||||
|
||||
while True:
|
||||
msg = self.msgs[self.i]
|
||||
self.i += 1
|
||||
return msg
|
||||
|
||||
|
||||
class PubSocket():
|
||||
def send(self, data):
|
||||
pass
|
||||
|
||||
|
||||
class SubMaster(messaging.SubMaster):
|
||||
def __init__(self, msgs, trigger, services, check_averag_freq=False):
|
||||
self.frame = 0
|
||||
self.data = {}
|
||||
self.ignore_alive = []
|
||||
|
||||
self.alive = {s: True for s in services}
|
||||
self.updated = {s: False for s in services}
|
||||
self.rcv_time = {s: 0. for s in services}
|
||||
self.rcv_frame = {s: 0 for s in services}
|
||||
self.valid = {s: True for s in services}
|
||||
self.freq_ok = {s: True for s in services}
|
||||
self.recv_dts = {s: deque([0.0] * messaging.AVG_FREQ_HISTORY, maxlen=messaging.AVG_FREQ_HISTORY) for s in services}
|
||||
self.logMonoTime = {}
|
||||
self.sock = {}
|
||||
self.freq = {}
|
||||
self.check_average_freq = check_averag_freq
|
||||
self.non_polled_services = []
|
||||
self.ignore_average_freq = []
|
||||
|
||||
# TODO: specify multiple triggers for service like plannerd that poll on more than one service
|
||||
cur_msgs = []
|
||||
self.msgs = []
|
||||
msgs = [m for m in msgs if m.which() in services]
|
||||
|
||||
for msg in msgs:
|
||||
cur_msgs.append(msg)
|
||||
if msg.which() == trigger:
|
||||
self.msgs.append(cur_msgs)
|
||||
cur_msgs = []
|
||||
|
||||
self.msgs = list(reversed(self.msgs))
|
||||
|
||||
for s in services:
|
||||
self.freq[s] = SERVICE_LIST[s].frequency
|
||||
try:
|
||||
data = messaging.new_message(s)
|
||||
except capnp.lib.capnp.KjException:
|
||||
# lists
|
||||
data = messaging.new_message(s, 0)
|
||||
|
||||
self.data[s] = getattr(data, s)
|
||||
self.logMonoTime[s] = 0
|
||||
self.sock[s] = SubSocket(msgs, s)
|
||||
|
||||
def update(self, timeout=None):
|
||||
if not len(self.msgs):
|
||||
raise ReplayDone
|
||||
|
||||
cur_msgs = self.msgs.pop()
|
||||
self.update_msgs(cur_msgs[0].logMonoTime, self.msgs.pop())
|
||||
|
||||
|
||||
class PubMaster(messaging.PubMaster):
|
||||
def __init__(self):
|
||||
self.sock = defaultdict(PubSocket)
|
||||
97
selfdrive/test/profiling/profiler.py
Normal file
97
selfdrive/test/profiling/profiler.py
Normal file
@@ -0,0 +1,97 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
import cProfile
|
||||
import pprofile
|
||||
import pyprof2calltree
|
||||
|
||||
from openpilot.common.params import Params
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
from openpilot.selfdrive.test.profiling.lib import SubMaster, PubMaster, SubSocket, ReplayDone
|
||||
from openpilot.selfdrive.test.process_replay.process_replay import CONFIGS
|
||||
from openpilot.selfdrive.car.toyota.values import CAR as TOYOTA
|
||||
from openpilot.selfdrive.car.honda.values import CAR as HONDA
|
||||
from openpilot.selfdrive.car.volkswagen.values import CAR as VW
|
||||
|
||||
BASE_URL = "https://commadataci.blob.core.windows.net/openpilotci/"
|
||||
|
||||
CARS = {
|
||||
'toyota': ("0982d79ebb0de295|2021-01-03--20-03-36/6", TOYOTA.RAV4),
|
||||
'honda': ("0982d79ebb0de295|2021-01-08--10-13-10/6", HONDA.CIVIC),
|
||||
"vw": ("ef895f46af5fd73f|2021-05-22--14-06-35/6", VW.AUDI_A3_MK3),
|
||||
}
|
||||
|
||||
|
||||
def get_inputs(msgs, process, fingerprint):
|
||||
for config in CONFIGS:
|
||||
if config.proc_name == process:
|
||||
sub_socks = list(config.pubs)
|
||||
trigger = sub_socks[0]
|
||||
break
|
||||
|
||||
# some procs block on CarParams
|
||||
for msg in msgs:
|
||||
if msg.which() == 'carParams':
|
||||
m = msg.as_builder()
|
||||
m.carParams.carFingerprint = fingerprint
|
||||
Params().put("CarParams", m.carParams.copy().to_bytes())
|
||||
break
|
||||
|
||||
sm = SubMaster(msgs, trigger, sub_socks)
|
||||
pm = PubMaster()
|
||||
if 'can' in sub_socks:
|
||||
can_sock = SubSocket(msgs, 'can')
|
||||
else:
|
||||
can_sock = None
|
||||
return sm, pm, can_sock
|
||||
|
||||
|
||||
def profile(proc, func, car='toyota'):
  """Profile one openpilot process end-to-end against a logged drive.

  Downloads the rlog for the chosen car's CI segment, then runs `func`
  (the process main loop) twice over the same replayed messages: once under
  pprofile's statistical sampler and once under cProfile, writing
  callgrind-format output files for each pass.
  """
  segment, fingerprint = CARS[car]
  segment = segment.replace('|', '/')
  rlog_url = f"{BASE_URL}{segment}/rlog.bz2"
  # LOOP env var repeats the log N times to lengthen the profiling run
  msgs = list(LogReader(rlog_url)) * int(os.getenv("LOOP", "1"))

  # env vars read by the process under test so it skips hardware/FW queries
  os.environ['FINGERPRINT'] = fingerprint
  os.environ['SKIP_FW_QUERY'] = "1"
  os.environ['REPLAY'] = "1"

  def run(sm, pm, can_sock):
    # the replay sockets raise ReplayDone once the log is exhausted;
    # that's the normal way out of the process main loop here
    try:
      if can_sock is not None:
        func(sm, pm, can_sock)
      else:
        func(sm, pm)
    except ReplayDone:
      pass

  # Statistical (sampling) profile — low overhead, approximate
  sm, pm, can_sock = get_inputs(msgs, proc, fingerprint)
  with pprofile.StatisticalProfile()(period=0.00001) as pr:
    run(sm, pm, can_sock)
  pr.dump_stats(f'cachegrind.out.{proc}_statistical')

  # Deterministic (tracing) profile — exact call counts, higher overhead
  sm, pm, can_sock = get_inputs(msgs, proc, fingerprint)
  with cProfile.Profile() as pr:
    run(sm, pm, can_sock)
  pyprof2calltree.convert(pr.getstats(), f'cachegrind.out.{proc}_deterministic')
|
||||
if __name__ == '__main__':
  # imported lazily so `import profiler` stays cheap
  from openpilot.selfdrive.controls.controlsd import main as controlsd_thread
  from openpilot.selfdrive.locationd.paramsd import main as paramsd_thread
  from openpilot.selfdrive.controls.plannerd import main as plannerd_thread

  # process name -> entry point that accepts the replay harness
  procs = {
    'controlsd': controlsd_thread,
    'paramsd': paramsd_thread,
    'plannerd': plannerd_thread,
  }

  proc = sys.argv[1]
  if proc not in procs:
    print(f"{proc} not available")
    # was sys.exit(0): an unknown process is a usage error and should not
    # report success to the calling shell/CI
    sys.exit(1)
  else:
    profile(proc, procs[proc])
10
selfdrive/test/scons_build_test.sh
Normal file
10
selfdrive/test/scons_build_test.sh
Normal file
@@ -0,0 +1,10 @@
|
||||
#!/bin/bash
# Abort on the first failing command; without this, a failed `cd` or
# `scons --clean` would let the build run in the wrong dir / report success.
set -e

SCRIPT_DIR=$(dirname "$0")
BASEDIR=$(realpath "$SCRIPT_DIR/../../")
cd "$BASEDIR"

# tests that our build system's dependencies are configured properly,
# needs a machine with lots of cores
scons --clean
scons --no-cache --random -j"$(nproc)"
10
selfdrive/test/setup_vsound.sh
Normal file
10
selfdrive/test/setup_vsound.sh
Normal file
@@ -0,0 +1,10 @@
|
||||
#!/bin/bash

# Sets up a virtual (null-sink) audio device for headless test runs.
# All output and errors are discarded on purpose: this is best-effort and
# may partially fail if pulseaudio is already running.
{
  #start pulseaudio daemon
  sudo pulseaudio -D

  # create a virtual null audio and set it to default device
  sudo pactl load-module module-null-sink sink_name=virtual_audio
  sudo pactl set-default-sink virtual_audio
} > /dev/null 2>&1
19
selfdrive/test/setup_xvfb.sh
Normal file
19
selfdrive/test/setup_xvfb.sh
Normal file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env bash

# Sets up a virtual display for running map renderer and simulator without an X11 display

DISP_ID=99
export DISPLAY=:$DISP_ID

# start Xvfb in the background; stderr silenced (e.g. "already running")
sudo Xvfb $DISPLAY -screen 0 2160x1080x24 2>/dev/null &

# check for x11 socket for the specified display ID
while [ ! -S /tmp/.X11-unix/X$DISP_ID ]
do
  echo "Waiting for Xvfb..."
  sleep 1
done

touch ~/.Xauthority
export XDG_SESSION_TYPE="x11"
# sanity-check the server; nonzero exit here means X is not usable
xset -q
@@ -29,7 +29,7 @@ from openpilot.tools.lib.logreader import LogReader
|
||||
|
||||
# Baseline CPU usage by process
|
||||
PROCS = {
|
||||
"selfdrive.controls.controlsd": 41.0,
|
||||
"selfdrive.controls.controlsd": 46.0,
|
||||
"./loggerd": 14.0,
|
||||
"./encoderd": 17.0,
|
||||
"./camerad": 14.5,
|
||||
@@ -424,4 +424,4 @@ class TestOnroad(unittest.TestCase):
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
pytest.main()
|
||||
unittest.main()
|
||||
|
||||
302
selfdrive/test/test_updated.py
Normal file
302
selfdrive/test/test_updated.py
Normal file
@@ -0,0 +1,302 @@
|
||||
#!/usr/bin/env python3
|
||||
import datetime
|
||||
import os
|
||||
import pytest
|
||||
import time
|
||||
import tempfile
|
||||
import unittest
|
||||
import shutil
|
||||
import signal
|
||||
import subprocess
|
||||
import random
|
||||
|
||||
from openpilot.common.basedir import BASEDIR
|
||||
from openpilot.common.params import Params
|
||||
|
||||
|
||||
@pytest.mark.tici
class TestUpdated(unittest.TestCase):
  """End-to-end tests for updated.py, the openpilot self-updater.

  setUp builds a disposable git "remote" (a clone of the current checkout)
  plus a working clone that updated runs against, all inside a temp dir.
  Tests drive update cycles by sending SIGHUP to the updater process and
  then inspecting the update-related params and overlay marker files.
  """

  def setUp(self):
    self.updated_proc = None

    self.tmp_dir = tempfile.TemporaryDirectory()
    org_dir = os.path.join(self.tmp_dir.name, "commaai")

    # repo layout: working clone, its "remote", and the overlay staging area
    self.basedir = os.path.join(org_dir, "openpilot")
    self.git_remote_dir = os.path.join(org_dir, "openpilot_remote")
    self.staging_dir = os.path.join(org_dir, "safe_staging")
    for d in [org_dir, self.basedir, self.git_remote_dir, self.staging_dir]:
      os.mkdir(d)

    # capture the NEOS version the current checkout requires
    self.neos_version = os.path.join(org_dir, "neos_version")
    self.neosupdate_dir = os.path.join(org_dir, "neosupdate")
    with open(self.neos_version, "w") as f:
      v = subprocess.check_output(r"bash -c 'source launch_env.sh && echo $REQUIRED_NEOS_VERSION'",
                                  cwd=BASEDIR, shell=True, encoding='utf8').strip()
      f.write(v)

    # overlay-mount dirs updated creates inside staging_dir
    self.upper_dir = os.path.join(self.staging_dir, "upper")
    self.merged_dir = os.path.join(self.staging_dir, "merged")
    self.finalized_dir = os.path.join(self.staging_dir, "finalized")

    # setup local submodule remotes
    submodules = subprocess.check_output("git submodule --quiet foreach 'echo $name'",
                                         shell=True, cwd=BASEDIR, encoding='utf8').split()
    for s in submodules:
      sub_path = os.path.join(org_dir, s.split("_repo")[0])
      self._run(f"git clone {s} {sub_path}.git", cwd=BASEDIR)

    # setup two git repos, a remote and one we'll run updated in
    self._run([
      f"git clone {BASEDIR} {self.git_remote_dir}",
      f"git clone {self.git_remote_dir} {self.basedir}",
      f"cd {self.basedir} && git submodule init && git submodule update",
      f"cd {self.basedir} && scons -j{os.cpu_count()} cereal/ common/"
    ])

    self.params = Params(os.path.join(self.basedir, "persist/params"))
    self.params.clear_all()
    os.sync()

  def tearDown(self):
    # best-effort shutdown of the updater; never let cleanup fail the test
    try:
      if self.updated_proc is not None:
        self.updated_proc.terminate()
        self.updated_proc.wait(30)
    except Exception as e:
      print(e)
    self.tmp_dir.cleanup()


  # *** test helpers ***


  def _run(self, cmd, cwd=None):
    # accepts a single shell command string or a list of them
    if not isinstance(cmd, list):
      cmd = (cmd,)

    for c in cmd:
      subprocess.check_output(c, cwd=cwd, shell=True)

  def _get_updated_proc(self):
    # environment the updater reads to operate on our sandbox repos
    os.environ["PYTHONPATH"] = self.basedir
    os.environ["GIT_AUTHOR_NAME"] = "testy tester"
    os.environ["GIT_COMMITTER_NAME"] = "testy tester"
    os.environ["GIT_AUTHOR_EMAIL"] = "testy@tester.test"
    os.environ["GIT_COMMITTER_EMAIL"] = "testy@tester.test"
    os.environ["UPDATER_TEST_IP"] = "localhost"
    os.environ["UPDATER_LOCK_FILE"] = os.path.join(self.tmp_dir.name, "updater.lock")
    os.environ["UPDATER_STAGING_ROOT"] = self.staging_dir
    os.environ["UPDATER_NEOS_VERSION"] = self.neos_version
    os.environ["UPDATER_NEOSUPDATE_DIR"] = self.neosupdate_dir
    updated_path = os.path.join(self.basedir, "selfdrive/updated.py")
    return subprocess.Popen(updated_path, env=os.environ)

  def _start_updater(self, offroad=True, nosleep=False):
    self.params.put_bool("IsOffroad", offroad)
    self.updated_proc = self._get_updated_proc()
    if not nosleep:
      # give the updater a moment to come up before the first SIGHUP
      time.sleep(1)

  def _update_now(self):
    # SIGHUP tells updated to start an update cycle immediately
    self.updated_proc.send_signal(signal.SIGHUP)

  # TODO: this should be implemented in params
  def _read_param(self, key, timeout=1):
    # poll until the param appears or timeout elapses; returns None on timeout
    ret = None
    start_time = time.monotonic()
    while ret is None:
      ret = self.params.get(key, encoding='utf8')
      if time.monotonic() - start_time > timeout:
        break
      time.sleep(0.01)
    return ret

  def _wait_for_update(self, timeout=30, clear_param=False):
    # trigger a cycle and block until LastUpdateTime is (re)written
    if clear_param:
      self.params.remove("LastUpdateTime")

    self._update_now()
    t = self._read_param("LastUpdateTime", timeout=timeout)
    if t is None:
      raise Exception("timed out waiting for update to complete")

  def _make_commit(self):
    # randomly mutate the "remote" repo and commit, simulating an upstream push
    all_dirs, all_files = [], []
    for root, dirs, files in os.walk(self.git_remote_dir):
      if ".git" in root:
        continue
      for d in dirs:
        all_dirs.append(os.path.join(root, d))
      for f in files:
        all_files.append(os.path.join(root, f))

    # make a new dir and some new files
    new_dir = os.path.join(self.git_remote_dir, "this_is_a_new_dir")
    os.mkdir(new_dir)
    for _ in range(random.randrange(5, 30)):
      for d in (new_dir, random.choice(all_dirs)):
        with tempfile.NamedTemporaryFile(dir=d, delete=False) as f:
          f.write(os.urandom(random.randrange(1, 1000000)))

    # modify some files
    for f in random.sample(all_files, random.randrange(5, 50)):
      with open(f, "w+") as ff:
        txt = ff.readlines()
        ff.seek(0)
        for line in txt:
          ff.write(line[::-1])

    # remove some files
    for f in random.sample(all_files, random.randrange(5, 50)):
      os.remove(f)

    # remove some dirs
    for d in random.sample(all_dirs, random.randrange(1, 10)):
      shutil.rmtree(d)

    # commit the changes
    self._run([
      "git add -A",
      "git commit -m 'an update'",
    ], cwd=self.git_remote_dir)

  def _check_update_state(self, update_available):
    # make sure LastUpdateTime is recent
    t = self._read_param("LastUpdateTime")
    last_update_time = datetime.datetime.fromisoformat(t)
    td = datetime.datetime.utcnow() - last_update_time
    self.assertLess(td.total_seconds(), 10)
    self.params.remove("LastUpdateTime")

    # wait a bit for the rest of the params to be written
    time.sleep(0.1)

    # check params
    update = self._read_param("UpdateAvailable")
    self.assertEqual(update == "1", update_available, f"UpdateAvailable: {repr(update)}")
    self.assertEqual(self._read_param("UpdateFailedCount"), "0")

    # TODO: check that the finalized update actually matches remote
    # check the .overlay_init and .overlay_consistent flags
    self.assertTrue(os.path.isfile(os.path.join(self.basedir, ".overlay_init")))
    self.assertEqual(os.path.isfile(os.path.join(self.finalized_dir, ".overlay_consistent")), update_available)


  # *** test cases ***


  # Run updated for 100 cycles with no update
  def test_no_update(self):
    self._start_updater()
    for _ in range(100):
      self._wait_for_update(clear_param=True)
      self._check_update_state(False)

  # Let the updater run with no update for a cycle, then write an update
  def test_update(self):
    self._start_updater()

    # run for a cycle with no update
    self._wait_for_update(clear_param=True)
    self._check_update_state(False)

    # write an update to our remote
    self._make_commit()

    # run for a cycle to get the update
    self._wait_for_update(timeout=60, clear_param=True)
    self._check_update_state(True)

    # run another cycle with no update
    self._wait_for_update(clear_param=True)
    self._check_update_state(True)

  # Let the updater run for 10 cycles, and write an update every cycle
  @unittest.skip("need to make this faster")
  def test_update_loop(self):
    self._start_updater()

    # run for a cycle with no update
    self._wait_for_update(clear_param=True)
    for _ in range(10):
      time.sleep(0.5)
      self._make_commit()
      self._wait_for_update(timeout=90, clear_param=True)
      self._check_update_state(True)

  # Test overlay re-creation after tracking a new file in basedir's git
  def test_overlay_reinit(self):
    self._start_updater()

    overlay_init_fn = os.path.join(self.basedir, ".overlay_init")

    # run for a cycle with no update
    self._wait_for_update(clear_param=True)
    self.params.remove("LastUpdateTime")
    first_mtime = os.path.getmtime(overlay_init_fn)

    # touch a file in the basedir
    self._run("touch new_file && git add new_file", cwd=self.basedir)

    # run another cycle, should have a new mtime
    self._wait_for_update(clear_param=True)
    second_mtime = os.path.getmtime(overlay_init_fn)
    self.assertTrue(first_mtime != second_mtime)

    # run another cycle, mtime should be same as last cycle
    self._wait_for_update(clear_param=True)
    new_mtime = os.path.getmtime(overlay_init_fn)
    self.assertTrue(second_mtime == new_mtime)

  # Make sure updated exits if another instance is running
  def test_multiple_instances(self):
    # start updated and let it run for a cycle
    self._start_updater()
    time.sleep(1)
    self._wait_for_update(clear_param=True)

    # start another instance
    second_updated = self._get_updated_proc()
    ret_code = second_updated.wait(timeout=5)
    self.assertTrue(ret_code is not None)


  # *** test cases with NEOS updates ***


  # Run updated with no update, make sure it clears the old NEOS update
  def test_clear_neos_cache(self):
    # make the dir and some junk files
    os.mkdir(self.neosupdate_dir)
    for _ in range(15):
      with tempfile.NamedTemporaryFile(dir=self.neosupdate_dir, delete=False) as f:
        f.write(os.urandom(random.randrange(1, 1000000)))

    self._start_updater()
    self._wait_for_update(clear_param=True)
    self._check_update_state(False)
    self.assertFalse(os.path.isdir(self.neosupdate_dir))

  # Let the updater run with no update for a cycle, then write an update
  @unittest.skip("TODO: only runs on device")
  def test_update_with_neos_update(self):
    # bump the NEOS version and commit it
    self._run([
      "echo 'export REQUIRED_NEOS_VERSION=3' >> launch_env.sh",
      "git -c user.name='testy' -c user.email='testy@tester.test' \
          commit -am 'a neos update'",
    ], cwd=self.git_remote_dir)

    # run for a cycle to get the update
    self._start_updater()
    self._wait_for_update(timeout=60, clear_param=True)
    self._check_update_state(True)

    # TODO: more comprehensive check
    self.assertTrue(os.path.isdir(self.neosupdate_dir))


if __name__ == "__main__":
  unittest.main()
117
selfdrive/test/test_valgrind_replay.py
Normal file
117
selfdrive/test/test_valgrind_replay.py
Normal file
@@ -0,0 +1,117 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
import unittest
|
||||
import subprocess
|
||||
import signal
|
||||
|
||||
# tqdm progress bars clutter CI logs — use a pass-through shim there
if "CI" in os.environ:
  def tqdm(x):
    return x
else:
  from tqdm import tqdm  # type: ignore
|
||||
import cereal.messaging as messaging
|
||||
from collections import namedtuple
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
from openpilot.tools.lib.openpilotci import get_url
|
||||
from openpilot.common.basedir import BASEDIR
|
||||
|
||||
# Describes how to replay one native process under valgrind:
# which sockets we feed from the log (pub_sub keys), which responses we
# expect (pub_sub values), the binary to launch, and the log segment to use.
ProcessConfig = namedtuple('ProcessConfig', ['proc_name', 'pub_sub', 'ignore', 'command', 'path', 'segment', 'wait_for_response'])

CONFIGS = [
  ProcessConfig(
    proc_name="ubloxd",
    pub_sub={
      "ubloxRaw": ["ubloxGnss", "gpsLocationExternal"],
    },
    ignore=[],
    command="./ubloxd",
    path="system/ubloxd",
    segment="0375fdf7b1ce594d|2019-06-13--08-32-25--3",
    wait_for_response=True
  ),
]
||||
|
||||
|
||||
class TestValgrind(unittest.TestCase):
  """Replays logged messages into a native process running under valgrind
  and fails if valgrind's leak summary reports any lost bytes.

  replay_process() publishes log messages on the process's input sockets
  while valgrindlauncher() (in a background thread) supervises the process;
  the two coordinate through self.replay_done and self.leak.
  """

  def extract_leak_sizes(self, log):
    # parse the byte counts out of valgrind's LEAK SUMMARY text
    if "All heap blocks were freed -- no leaks are possible" in log:
      return (0,0,0)

    log = log.replace(",","")  # fixes casting to int issue with large leaks
    err_lost1 = log.split("definitely lost: ")[1]
    err_lost2 = log.split("indirectly lost: ")[1]
    err_lost3 = log.split("possibly lost: ")[1]
    definitely_lost = int(err_lost1.split(" ")[0])
    indirectly_lost = int(err_lost2.split(" ")[0])
    possibly_lost = int(err_lost3.split(" ")[0])
    return (definitely_lost, indirectly_lost, possibly_lost)

  def valgrindlauncher(self, arg, cwd):
    # runs in a daemon thread; sets self.leak when valgrind output is parsed
    os.chdir(os.path.join(BASEDIR, cwd))
    # Run valgrind on a process
    command = "valgrind --leak-check=full " + arg
    # setsid puts valgrind + child in their own process group so we can
    # signal the whole group below
    p = subprocess.Popen(command, stderr=subprocess.PIPE, shell=True, preexec_fn=os.setsid)

    while not self.replay_done:
      time.sleep(0.1)

    # Kill valgrind and extract leak output
    os.killpg(os.getpgid(p.pid), signal.SIGINT)
    _, err = p.communicate()
    error_msg = str(err, encoding='utf-8')
    with open(os.path.join(BASEDIR, "selfdrive/test/valgrind_logs.txt"), "a") as f:
      f.write(error_msg)
      f.write(5 * "\n")
    definitely_lost, indirectly_lost, possibly_lost = self.extract_leak_sizes(error_msg)
    if max(definitely_lost, indirectly_lost, possibly_lost) > 0:
      self.leak = True
      print("LEAKS from", arg, "\nDefinitely lost:", definitely_lost, "\nIndirectly lost", indirectly_lost, "\nPossibly lost", possibly_lost)
    else:
      self.leak = False

  def replay_process(self, config, logreader):
    pub_sockets = list(config.pub_sub.keys())  # We dump data from logs here
    sub_sockets = [s for _, sub in config.pub_sub.items() for s in sub]  # We get responses here
    pm = messaging.PubMaster(pub_sockets)
    sm = messaging.SubMaster(sub_sockets)

    print("Sorting logs")
    all_msgs = sorted(logreader, key=lambda msg: msg.logMonoTime)
    pub_msgs = [msg for msg in all_msgs if msg.which() in list(config.pub_sub.keys())]

    thread = threading.Thread(target=self.valgrindlauncher, args=(config.command, config.path))
    thread.daemon = True
    thread.start()

    # busy-wait until the process under test has connected its subscribers
    while not all(pm.all_readers_updated(s) for s in config.pub_sub.keys()):
      time.sleep(0)

    for msg in tqdm(pub_msgs):
      pm.send(msg.which(), msg.as_builder())
      if config.wait_for_response:
        sm.update(100)

    # signals valgrindlauncher (other thread) to stop and collect output
    self.replay_done = True

  def test_config(self):
    # truncate the leak log left over from any previous run
    open(os.path.join(BASEDIR, "selfdrive/test/valgrind_logs.txt"), "w").close()

    for cfg in CONFIGS:
      self.leak = None
      self.replay_done = False

      r, n = cfg.segment.rsplit("--", 1)
      lr = LogReader(get_url(r, n))
      self.replay_process(cfg, lr)

      # NOTE(review): loops forever if valgrindlauncher dies before setting
      # self.leak — presumably acceptable under the CI job timeout; confirm
      while self.leak is None:
        time.sleep(0.1)  # Wait for the valgrind to finish

      self.assertFalse(self.leak)


if __name__ == "__main__":
  unittest.main()
84
selfdrive/test/update_ci_routes.py
Normal file
84
selfdrive/test/update_ci_routes.py
Normal file
@@ -0,0 +1,84 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from collections.abc import Iterable
|
||||
|
||||
from tqdm import tqdm
|
||||
|
||||
from openpilot.selfdrive.car.tests.routes import routes as test_car_models_routes
|
||||
from openpilot.selfdrive.test.process_replay.test_processes import source_segments as replay_segments
|
||||
from openpilot.tools.lib.azure_container import AzureContainer
|
||||
from openpilot.tools.lib.openpilotcontainers import DataCIContainer, DataProdContainer, OpenpilotCIContainer
|
||||
|
||||
# Containers to copy routes from, tried in order; assumes `az login` creds.
SOURCES: list[AzureContainer] = [
  DataProdContainer,
  DataCIContainer
]

# Public container CI tests read routes from.
DEST = OpenpilotCIContainer
||||
def upload_route(path: str, exclude_patterns: Iterable[str] | None = None) -> None:
  """Upload every file of a local route segment dir to the CI container.

  Args:
    path: local segment directory, e.g. ".../<dongle>|<route>--<seg>".
    exclude_patterns: regex patterns for filenames to skip; defaults to
      excluding driver-camera footage.  (Annotation fixed: the default is
      None, so the declared type must be Optional.)
  """
  if exclude_patterns is None:
    exclude_patterns = [r'dcamera\.hevc']

  r, n = path.rsplit("--", 1)
  r = '/'.join(r.split('/')[-2:])  # strip out anything extra in the path
  destpath = f"{r}/{n}"
  for file in os.listdir(path):
    if any(re.search(pattern, file) for pattern in exclude_patterns):
      continue
    DEST.upload_file(os.path.join(path, file), f"{destpath}/{file}")
|
||||
|
||||
def sync_to_ci_public(route: str) -> bool:
  """Copy one route from the source containers into the public CI container.

  Returns True if the route already exists in DEST or an azcopy succeeded
  from any source; False if every source failed.
  """
  dest_container, dest_key = DEST.get_client_and_key()
  key_prefix = route.replace('|', '/')
  dongle_id = key_prefix.split('/')[0]

  # already synced — nothing to do
  if next(dest_container.list_blob_names(name_starts_with=key_prefix), None) is not None:
    return True

  print(f"Uploading {route}")
  for source_container in SOURCES:
    # assumes az login has been run
    print(f"Trying {source_container.ACCOUNT}/{source_container.CONTAINER}")
    _, source_key = source_container.get_client_and_key()
    cmd = [
      "azcopy",
      "copy",
      f"{source_container.BASE_URL}{key_prefix}?{source_key}",
      f"{DEST.BASE_URL}{dongle_id}?{dest_key}",
      "--recursive=true",
      "--overwrite=false",
      "--exclude-pattern=*/dcamera.hevc",
    ]

    # BUGFIX: subprocess.call() returns the exit code and never raises
    # CalledProcessError (that's check_call), so the old `except` was dead
    # and nonzero exits fell through silently.  Handle the return code, and
    # catch OSError for a missing/unrunnable azcopy binary.
    try:
      result = subprocess.call(cmd, stdout=subprocess.DEVNULL)
    except OSError:
      print("Failed")
      continue
    if result == 0:
      print("Success")
      return True
    print("Failed")

  return False
||||
|
||||
|
||||
if __name__ == "__main__":
  failed_routes = []

  # routes may be given on the command line; otherwise sync the default set
  to_sync = sys.argv[1:]

  if not len(to_sync):
    # sync routes from the car tests routes and process replay
    to_sync.extend([rt.route for rt in test_car_models_routes])
    to_sync.extend([s[1].rsplit('--', 1)[0] for s in replay_segments])

  for r in tqdm(to_sync):
    if not sync_to_ci_public(r):
      failed_routes.append(r)

  if len(failed_routes):
    print("failed routes:", failed_routes)
||||
Reference in New Issue
Block a user