Add openpilot tests
This commit is contained in:
2
system/camerad/test/.gitignore
vendored
Normal file
2
system/camerad/test/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
jpegs/
|
||||
test_ae_gray
|
||||
27
system/camerad/test/check_skips.py
Normal file
27
system/camerad/test/check_skips.py
Normal file
@@ -0,0 +1,27 @@
|
||||
#!/usr/bin/env python3
# type: ignore
"""Watch the three camera state streams and report any skipped frame IDs."""
import cereal.messaging as messaging

all_sockets = ['roadCameraState', 'driverCameraState', 'wideRoadCameraState']
prev_id = [None, None, None]
this_id = [None, None, None]
dt = [None, None, None]
num_skipped = [0, 0, 0]

if __name__ == "__main__":
  sm = messaging.SubMaster(all_sockets)
  while True:
    sm.update()

    for i, sock in enumerate(all_sockets):
      # only look at sockets that got a fresh message this cycle
      if not sm.updated[sock]:
        continue
      this_id[i] = sm[sock].frameId
      if prev_id[i] is None:
        # first frame seen on this socket: just establish a baseline
        prev_id[i] = this_id[i]
        continue
      dt[i] = this_id[i] - prev_id[i]
      # consecutive frame IDs should differ by exactly 1; anything else is a skip
      if dt[i] != 1:
        num_skipped[i] += dt[i] - 1
        print(sock, dt[i] - 1, num_skipped[i])
      prev_id[i] = this_id[i]
|
||||
24
system/camerad/test/get_thumbnails_for_segment.py
Normal file
24
system/camerad/test/get_thumbnails_for_segment.py
Normal file
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env python3
"""Extract every thumbnail from a route's logs into a per-route jpegs/ folder."""
import argparse
import os

from tqdm import tqdm

from openpilot.tools.lib.logreader import LogReader

if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  parser.add_argument("route", help="The route name")
  args = parser.parse_args()

  # '|' and '/' appear in route names but are awkward in paths, so flatten them
  out_path = os.path.join("jpegs", args.route.replace('|', '_').replace('/', '_'))
  os.makedirs(out_path, exist_ok=True)

  lr = LogReader(args.route)

  for msg in tqdm(lr):
    kind = msg.which()
    if kind == 'thumbnail':
      dest = os.path.join(out_path, f"{msg.thumbnail.frameId}.jpg")
      with open(dest, 'wb') as f:
        f.write(msg.thumbnail.thumbnail)
    elif kind == 'navThumbnail':
      # nav thumbnails get a "nav_" prefix so they don't collide with camera ones
      dest = os.path.join(out_path, f"nav_{msg.navThumbnail.frameId}.jpg")
      with open(dest, 'wb') as f:
        f.write(msg.navThumbnail.thumbnail)
|
||||
9
system/camerad/test/stress_restart.sh
Normal file
9
system/camerad/test/stress_restart.sh
Normal file
@@ -0,0 +1,9 @@
|
||||
#!/bin/sh
# Stress-test camerad startup/shutdown: launch it, let it run for 2 seconds,
# interrupt it, and repeat forever.
cd ..
while :; do
  ./camerad &
  pid="$!"
  sleep 2
  kill -2 "$pid"  # SIGINT, same as Ctrl-C
  wait "$pid"
done
|
||||
83
system/camerad/test/test_ae_gray.cc
Normal file
83
system/camerad/test/test_ae_gray.cc
Normal file
@@ -0,0 +1,83 @@
|
||||
#define CATCH_CONFIG_MAIN
|
||||
#include "catch2/catch.hpp"
|
||||
|
||||
#include <cassert>
|
||||
|
||||
#include <cmath>
|
||||
#include <cstring>
|
||||
|
||||
#include "common/util.h"
|
||||
#include "system/camerad/cameras/camera_common.h"
|
||||
|
||||
#define W 240
|
||||
#define H 160
|
||||
|
||||
|
||||
#define TONE_SPLITS 3
|
||||
|
||||
// Ground-truth outputs of set_exposure_target() for each generated tone
// pattern, listed in the same nested i_0/i_1/i_2/i_3 loop order the test
// below iterates (TONE_SPLITS^4 = 81 entries).
float gts[TONE_SPLITS * TONE_SPLITS * TONE_SPLITS * TONE_SPLITS] = {
  0.917969, 0.917969, 0.375000, 0.917969, 0.375000, 0.375000, 0.187500, 0.187500, 0.187500, 0.917969,
  0.375000, 0.375000, 0.187500, 0.187500, 0.187500, 0.187500, 0.187500, 0.187500, 0.093750, 0.093750,
  0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.917969, 0.375000, 0.375000,
  0.187500, 0.187500, 0.187500, 0.187500, 0.187500, 0.187500, 0.093750, 0.093750, 0.093750, 0.093750,
  0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750, 0.093750,
  0.093750, 0.093750, 0.093750, 0.093750, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000,
  0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000,
  0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000,
  0.000000};
|
||||
|
||||
|
||||
// Feeds synthetic gray-band patterns into set_exposure_target() and compares
// the computed exposure value against the precomputed ground truth in gts[].
TEST_CASE("camera.test_set_exposure_target") {
  // set up fake camerabuf backed by a plain grayscale allocation
  CameraBuf cb = {};
  VisionBuf vb = {};
  uint8_t *fb_y = new uint8_t[W*H];
  vb.y = fb_y;
  cb.cur_yuv_buf = &vb;
  cb.rgb_width = W;
  cb.rgb_height = H;

  printf("AE test patterns %dx%d\n", cb.rgb_width, cb.rgb_height);

  // mix of 5 tones
  uint8_t l[5] = {0, 24, 48, 96, 235};  // 235 is yuv max

  bool passed = true;
  float rtol = 0.05;  // relative tolerance vs ground truth
  // generate pattern and calculate EV
  int cnt = 0;
  for (int i_0 = 0; i_0 < TONE_SPLITS; i_0++) {
    for (int i_1 = 0; i_1 < TONE_SPLITS; i_1++) {
      for (int i_2 = 0; i_2 < TONE_SPLITS; i_2++) {
        for (int i_3 = 0; i_3 < TONE_SPLITS; i_3++) {
          // split the image height into 5 horizontal bands, one per tone;
          // each split takes a fraction of the height remaining after the
          // previous bands, and the last band absorbs the remainder
          int h_0 = i_0 * H / TONE_SPLITS;
          int h_1 = i_1 * (H - h_0) / TONE_SPLITS;
          int h_2 = i_2 * (H - h_0 - h_1) / TONE_SPLITS;
          int h_3 = i_3 * (H - h_0 - h_1 - h_2) / TONE_SPLITS;
          int h_4 = H - h_0 - h_1 - h_2 - h_3;
          memset(&fb_y[0], l[0], h_0*W);
          memset(&fb_y[h_0*W], l[1], h_1*W);
          memset(&fb_y[h_0*W+h_1*W], l[2], h_2*W);
          memset(&fb_y[h_0*W+h_1*W+h_2*W], l[3], h_3*W);
          memset(&fb_y[h_0*W+h_1*W+h_2*W+h_3*W], l[4], h_4*W);
          float ev = set_exposure_target((const CameraBuf*) &cb, 0, W-1, 1, 0, H-1, 1);

          // compare to gt
          float evgt = gts[cnt];
          if (fabs(ev - evgt) > rtol*evgt) {
            passed = false;
          }

          // report
          printf("%d/%d/%d/%d/%d: ev %f, gt %f, err %f\n", h_0, h_1, h_2, h_3, h_4, ev, evgt, fabs(ev - evgt) / (evgt != 0 ? evgt : 0.00001f));
          cnt++;
        }
      }
    }
  }

  // Free the buffer before asserting so a failure doesn't leak it, and use
  // Catch2's REQUIRE instead of assert(), which is compiled out under NDEBUG
  // and would make the test pass vacuously in release builds.
  delete[] fb_y;
  REQUIRE(passed);
}
|
||||
83
system/camerad/test/test_camerad.py
Normal file
83
system/camerad/test/test_camerad.py
Normal file
@@ -0,0 +1,83 @@
|
||||
#!/usr/bin/env python3
|
||||
import pytest
|
||||
import time
|
||||
import numpy as np
|
||||
from flaky import flaky
|
||||
from collections import defaultdict
|
||||
|
||||
import cereal.messaging as messaging
|
||||
from cereal import log
|
||||
from cereal.services import SERVICE_LIST
|
||||
from openpilot.selfdrive.manager.process_config import managed_processes
|
||||
|
||||
# how long to record camera messages for, in seconds
TEST_TIMESPAN = 30
# max allowed spread (ms) of start-of-frame timestamps across cameras for one
# frame id, per sensor type
LAG_FRAME_TOLERANCE = {log.FrameData.ImageSensor.ar0231: 0.5, # ARs use synced pulses for frame starts
                       log.FrameData.ImageSensor.ox03c10: 1.1} # OXs react to out-of-sync at next frame
# max allowed deviation (ms) of the inter-frame interval from nominal, per sensor type
FRAME_DELTA_TOLERANCE = {log.FrameData.ImageSensor.ar0231: 1.0,
                         log.FrameData.ImageSensor.ox03c10: 1.0}

CAMERAS = ('roadCameraState', 'driverCameraState', 'wideRoadCameraState')
|
||||
|
||||
# TODO: this shouldn't be needed
@flaky(max_runs=3)
@pytest.mark.tici
class TestCamerad:
  """Records camerad output on-device, then checks frame rate, skips, and sync."""

  def setup_method(self):
    # run camerad and record logs
    managed_processes['camerad'].start()
    time.sleep(3)
    # non-conflated subscriptions so every frame message is kept, not just the latest
    socks = {c: messaging.sub_sock(c, conflate=False, timeout=100) for c in CAMERAS}

    self.logs = defaultdict(list)
    start_time = time.monotonic()
    while time.monotonic()- start_time < TEST_TIMESPAN:
      for cam, s in socks.items():
        self.logs[cam] += messaging.drain_sock(s)
      time.sleep(0.2)
    managed_processes['camerad'].stop()

    self.log_by_frame_id = defaultdict(list)
    self.sensor_type = None
    for cam, msgs in self.logs.items():
      if self.sensor_type is None:
        # taken from the first message; assumes all cameras use the same sensor
        self.sensor_type = getattr(msgs[0], msgs[0].which()).sensor.raw
      # each camera must publish within 5% of its nominal frame count
      expected_frames = SERVICE_LIST[cam].frequency * TEST_TIMESPAN
      assert expected_frames*0.95 < len(msgs) < expected_frames*1.05, f"unexpected frame count {cam}: {expected_frames=}, got {len(msgs)}"

      # deviation (ms) of each inter-frame interval from the nominal period
      dts = np.abs(np.diff([getattr(m, m.which()).timestampSof/1e6 for m in msgs]) - 1000/SERVICE_LIST[cam].frequency)
      assert (dts < FRAME_DELTA_TOLERANCE[self.sensor_type]).all(), f"{cam} dts(ms) out of spec: max diff {dts.max()}, 99 percentile {np.percentile(dts, 99)}"

      for m in msgs:
        self.log_by_frame_id[getattr(m, m.which()).frameId].append(m)

    # strip beginning and end
    for _ in range(3):
      mn, mx = min(self.log_by_frame_id.keys()), max(self.log_by_frame_id.keys())
      del self.log_by_frame_id[mn]
      del self.log_by_frame_id[mx]

  def test_frame_skips(self):
    """Every frame id in the recorded range must have a message from all cameras."""
    skips = {}
    frame_ids = self.log_by_frame_id.keys()
    for frame_id in range(min(frame_ids), max(frame_ids)):
      seen_cams = [msg.which() for msg in self.log_by_frame_id[frame_id]]
      skip_cams = set(CAMERAS) - set(seen_cams)
      if len(skip_cams):
        skips[frame_id] = skip_cams
    assert len(skips) == 0, f"Found frame skips, missing cameras for the following frames: {skips}"

  def test_frame_sync(self):
    """Start-of-frame timestamps for the same frame id must agree across cameras."""
    frame_times = {frame_id: [getattr(m, m.which()).timestampSof for m in msgs] for frame_id, msgs in self.log_by_frame_id.items()}
    # spread (ms) between the earliest and latest camera for each frame id
    diffs = {frame_id: (max(ts) - min(ts))/1e6 for frame_id, ts in frame_times.items()}

    def get_desc(fid, diff):
      # build (spread_ms, [(camera, sof_ms), ...]) for the failure message
      cam_times = [(m.which(), getattr(m, m.which()).timestampSof/1e6) for m in self.log_by_frame_id[fid]]
      return (diff, cam_times)
    laggy_frames = {k: get_desc(k, v) for k, v in diffs.items() if v > LAG_FRAME_TOLERANCE[self.sensor_type]}

    def in_tol(diff):
      # spread close to 50ms — presumably one full frame period, i.e. cameras
      # consistently off by a whole frame rather than jittering (TODO confirm)
      return 50 - LAG_FRAME_TOLERANCE[self.sensor_type] < diff and diff < 50 + LAG_FRAME_TOLERANCE[self.sensor_type]
    if len(laggy_frames) != 0 and all( in_tol(laggy_frames[lf][0]) for lf in laggy_frames):
      print("TODO: handle camera out of sync")
    else:
      assert len(laggy_frames) == 0, f"Frames not synced properly: {laggy_frames=}"
|
||||
55
system/camerad/test/test_exposure.py
Normal file
55
system/camerad/test/test_exposure.py
Normal file
@@ -0,0 +1,55 @@
|
||||
#!/usr/bin/env python3
|
||||
import time
|
||||
import unittest
|
||||
import numpy as np
|
||||
|
||||
from openpilot.selfdrive.test.helpers import with_processes, phone_only
|
||||
from openpilot.system.camerad.snapshot.snapshot import get_snapshots
|
||||
|
||||
# total time budget for the whole check, in seconds
TEST_TIME = 45
# number of rounds where all cameras must be well-exposed to pass
REPEAT = 5
|
||||
|
||||
class TestCamerad(unittest.TestCase):
  """Grabs live camera snapshots and checks that auto-exposure keeps
  brightness within an acceptable window."""

  @classmethod
  def setUpClass(cls):
    pass

  def _numpy_rgb2gray(self, im):
    # BT.601 luma: 0.299*R + 0.587*G + 0.114*B, clipped and cast to uint8
    return np.clip(im[:,:,2] * 0.114 + im[:,:,1] * 0.587 + im[:,:,0] * 0.299, 0, 255).astype(np.uint8)

  def _is_exposure_okay(self, i, med_mean=None):
    # acceptance windows for (median, mean) of the normalized gray levels
    if med_mean is None:
      med_mean = np.array([[0.2,0.4],[0.2,0.6]])
    med_ex, mean_ex = med_mean
    # crop away the outer 10% border before measuring
    h, w = i.shape[:2]
    gray = self._numpy_rgb2gray(i[h//10:9*h//10,w//10:9*w//10])
    i_median = np.median(gray) / 255.
    i_mean = np.mean(gray) / 255.
    print([i_median, i_mean])
    return med_ex[0] < i_median < med_ex[1] and mean_ex[0] < i_mean < mean_ex[1]

  @phone_only
  @with_processes(['camerad'])
  def test_camera_operation(self):
    passed = 0
    start = time.time()
    while time.time() - start < TEST_TIME and passed < REPEAT:
      rpic, dpic = get_snapshots(frame="roadCameraState", front_frame="driverCameraState")
      wpic, _ = get_snapshots(frame="wideRoadCameraState")

      # short-circuits: once one camera fails, the rest are not checked
      res = all(self._is_exposure_okay(pic) for pic in (rpic, dpic, wpic))

      if passed > 0 and not res:
        passed = -passed # fails test if any failure after first sus
        break

      passed += int(res)
      time.sleep(2)
    self.assertGreaterEqual(passed, REPEAT)
|
||||
|
||||
# allow running this file directly as a unittest suite
if __name__ == "__main__":
  unittest.main()
|
||||
Reference in New Issue
Block a user