Add openpilot tools

FrogAi
2024-02-27 16:34:47 -07:00
parent b39097a12d
commit a083b93479
156 changed files with 16482 additions and 0 deletions

tools/replay/.gitignore vendored Normal file

@@ -0,0 +1,5 @@
moc_*
*.moc
replay
tests/test_replay

tools/replay/README.md Normal file

@@ -0,0 +1,87 @@
# Replay
## Replay driving data
`replay` replays all the messages logged while running openpilot.
```bash
# Log in via browser to have access to routes from your comma account
python tools/lib/auth.py
# Start a replay
tools/replay/replay <route-name>
# Example:
tools/replay/replay 'a2a0ccea32023010|2023-07-27--13-01-19'
# or use --demo to replay the default demo route:
tools/replay/replay --demo
# watch the replay with the normal openpilot UI
cd selfdrive/ui && ./ui
# or try out a debug visualizer:
python tools/replay/ui.py
```
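Replayed messages appear on the same services the live system uses, so any tool that subscribes via cereal works unchanged against a replay. A minimal consumer sketch (service and field names as used elsewhere in this repo):
```python
import cereal.messaging as messaging

# subscribe to carState while a replay is publishing
sm = messaging.SubMaster(['carState'])
while True:
  sm.update(100)  # wait up to 100 ms for new messages
  if sm.updated['carState']:
    cs = sm['carState']
    print(f"vEgo: {cs.vEgo:.1f} m/s, steering angle: {cs.steeringAngleDeg:.1f} deg")
```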
## Usage
```bash
$ tools/replay/replay -h
Usage: tools/replay/replay [options] route
Mock openpilot components by publishing logged messages.
Options:
-h, --help Displays this help.
-a, --allow <allow> whitelist of services to send
-b, --block <block> blacklist of services to send
-s, --start <seconds> start from <seconds>
--demo use a demo route instead of providing your own
--dcam load driver camera
--ecam load wide road camera
Arguments:
route the drive to replay. find your drives at
connect.comma.ai
```
## watch3
Watch all three cameras simultaneously from your comma three routes with watch3.
Simply replay a route using the `--dcam` and `--ecam` flags:
```bash
# start a replay
cd tools/replay && ./replay --demo --dcam --ecam
# then start watch3
cd selfdrive/ui && ./watch3
```
![](https://i.imgur.com/IeaOdAb.png)
## Stream CAN messages to your device
Replay CAN messages as they were recorded using a [panda jungle](https://comma.ai/shop/products/panda-jungle). The jungle has 6x OBD-C ports for connecting all your comma devices. Check out the [jungle repo](https://github.com/commaai/panda_jungle) for more info.
To run your device as if it were in a car:
* connect a panda jungle to your PC
* connect a comma device or panda to the jungle via OBD-C
* run `can_replay.py`
```bash
batman:replay$ ./can_replay.py -h
usage: can_replay.py [-h] [route_or_segment_name]
Replay CAN messages from a route to all connected pandas and jungles
in a loop.
positional arguments:
route_or_segment_name
The route or segment name to replay. If not
specified, a default public route will be
used. (default: None)
optional arguments:
-h, --help show this help message and exit
```
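Under the hood this is just `can_send_many` on every connected panda, as `can_replay.py` below shows. A rough sketch of the same idea against a single panda; the message tuple layout here is an assumption, mirroring what `can_capnp_to_can_list` produces (address, bus time, data, bus):
```python
from panda import Panda

p = Panda()  # connects to the first panda found over USB
p.set_safety_mode(Panda.SAFETY_ALLOUTPUT)  # allow arbitrary sends; bench use only

# one illustrative frame, assumed layout: (address, busTime, data, bus)
msgs = [(0x123, 0, b'\x00' * 8, 0)]
p.can_send_many(msgs)
p.can_recv()  # drain the receive buffer, like the replay loop does
```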

tools/replay/SConscript Normal file

@@ -0,0 +1,21 @@
Import('env', 'qt_env', 'arch', 'common', 'messaging', 'visionipc', 'cereal')

base_frameworks = qt_env['FRAMEWORKS']
base_libs = [common, messaging, cereal, visionipc, 'zmq',
             'capnp', 'kj', 'm', 'ssl', 'crypto', 'pthread', 'qt_util'] + qt_env["LIBS"]

if arch == "Darwin":
  base_frameworks.append('OpenCL')
else:
  base_libs.append('OpenCL')

qt_env['CXXFLAGS'] += ["-Wno-deprecated-declarations"]

replay_lib_src = ["replay.cc", "consoleui.cc", "camera.cc", "filereader.cc", "logreader.cc", "framereader.cc", "route.cc", "util.cc"]
replay_lib = qt_env.Library("qt_replay", replay_lib_src, LIBS=base_libs, FRAMEWORKS=base_frameworks)
Export('replay_lib')
replay_libs = [replay_lib, 'avutil', 'avcodec', 'avformat', 'bz2', 'curl', 'yuv', 'ncurses'] + base_libs
qt_env.Program("replay", ["main.cc"], LIBS=replay_libs, FRAMEWORKS=base_frameworks)

if GetOption('extras'):
  qt_env.Program('tests/test_replay', ['tests/test_runner.cc', 'tests/test_replay.cc'], LIBS=[replay_libs, base_libs])
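
The replay tool is built through openpilot's normal scons build; from the repo root, something like the following should work (`--extras`, checked above with `GetOption('extras')`, additionally builds the test binary):
```bash
scons -j$(nproc) tools/replay           # builds tools/replay/replay
scons -j$(nproc) --extras tools/replay  # also builds tools/replay/tests/test_replay
```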

tools/replay/__init__.py Normal file

tools/replay/can_replay.py Normal file

@@ -0,0 +1,107 @@
#!/usr/bin/env python3
import argparse
import os
import time
import threading
import multiprocessing
from tqdm import tqdm

os.environ['FILEREADER_CACHE'] = '1'

from openpilot.common.realtime import config_realtime_process, Ratekeeper, DT_CTRL
from openpilot.selfdrive.boardd.boardd import can_capnp_to_can_list
from openpilot.tools.plotjuggler.juggle import load_segment
from openpilot.tools.lib.logreader import logreader_from_route_or_segment
from panda import Panda, PandaJungle


def send_thread(s, flock):
  if "Jungle" in str(type(s)):
    if "FLASH" in os.environ:
      with flock:
        s.flash()

    for i in [0, 1, 2, 3, 0xFFFF]:
      s.can_clear(i)
    s.set_ignition(False)
    time.sleep(5)
    s.set_ignition(True)
    s.set_panda_power(True)
  else:
    s.set_safety_mode(Panda.SAFETY_ALLOUTPUT)
  s.set_can_loopback(False)

  idx = 0
  ign = True
  rk = Ratekeeper(1 / DT_CTRL, print_delay_threshold=None)
  while True:
    # handle ignition cycling
    if ENABLE_IGN:
      i = (rk.frame * DT_CTRL) % (IGN_ON + IGN_OFF) < IGN_ON
      if i != ign:
        ign = i
        s.set_ignition(ign)

    snd = CAN_MSGS[idx]
    snd = list(filter(lambda x: x[-1] <= 2, snd))
    s.can_send_many(snd)
    idx = (idx + 1) % len(CAN_MSGS)

    # Drain panda message buffer
    s.can_recv()
    rk.keep_time()


def connect():
  config_realtime_process(3, 55)

  serials = {}
  flashing_lock = threading.Lock()
  while True:
    # look for new devices
    for p in [Panda, PandaJungle]:
      if p is None:
        continue

      for s in p.list():
        if s not in serials:
          print("starting send thread for", s)
          serials[s] = threading.Thread(target=send_thread, args=(p(s), flashing_lock))
          serials[s].start()

    # try to join all send threads
    cur_serials = serials.copy()
    for s, t in cur_serials.items():
      t.join(0.01)
      if not t.is_alive():
        del serials[s]

    time.sleep(1)


if __name__ == "__main__":
  parser = argparse.ArgumentParser(description="Replay CAN messages from a route to all connected pandas and jungles in a loop.",
                                   formatter_class=argparse.ArgumentDefaultsHelpFormatter)
  parser.add_argument("route_or_segment_name", nargs='?', help="The route or segment name to replay. If not specified, a default public route will be used.")
  args = parser.parse_args()

  print("Loading log...")
  if args.route_or_segment_name is None:
    ROUTE = "77611a1fac303767/2020-03-24--09-50-38"
    REPLAY_SEGS = list(range(10, 16))  # route has 82 segments available
    CAN_MSGS = []
    logs = [f"https://commadataci.blob.core.windows.net/openpilotci/{ROUTE}/{i}/rlog.bz2" for i in REPLAY_SEGS]
    with multiprocessing.Pool(24) as pool:
      for lr in tqdm(pool.map(load_segment, logs)):
        CAN_MSGS += [can_capnp_to_can_list(m.can) for m in lr if m.which() == 'can']
  else:
    lr = logreader_from_route_or_segment(args.route_or_segment_name)
    CAN_MSGS = [can_capnp_to_can_list(m.can) for m in lr if m.which() == 'can']

  # set both to cycle ignition
  IGN_ON = int(os.getenv("ON", "0"))
  IGN_OFF = int(os.getenv("OFF", "0"))
  ENABLE_IGN = IGN_ON > 0 and IGN_OFF > 0
  if ENABLE_IGN:
    print(f"Cycling ignition: on for {IGN_ON}s, off for {IGN_OFF}s")

  connect()
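
As the `__main__` block shows, ignition cycling is controlled purely through the `ON` and `OFF` environment variables (both must be positive to take effect), and setting `FLASH` reflashes connected jungles before replaying. For example, using the route from the README above:
```bash
# cycle ignition 5 s on / 5 s off while replaying a specific route
ON=5 OFF=5 tools/replay/can_replay.py 'a2a0ccea32023010|2023-07-27--13-01-19'
```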

tools/replay/lib/ui_helpers.py Normal file

@@ -0,0 +1,268 @@
import itertools
from typing import Any, Dict, Tuple

import matplotlib.pyplot as plt
import numpy as np
import pygame
from matplotlib.backends.backend_agg import FigureCanvasAgg

from openpilot.common.transformations.camera import (eon_f_frame_size, eon_f_focal_length,
                                                     tici_f_frame_size, tici_f_focal_length,
                                                     get_view_frame_from_calib_frame)
from openpilot.selfdrive.controls.radard import RADAR_TO_CAMERA

RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)

_FULL_FRAME_SIZE = {}


class UIParams:
  lidar_x, lidar_y, lidar_zoom = 384, 960, 6
  lidar_car_x, lidar_car_y = lidar_x / 2., lidar_y / 1.1
  car_hwidth = 1.7272 / 2 * lidar_zoom
  car_front = 2.6924 * lidar_zoom
  car_back = 1.8796 * lidar_zoom
  car_color = 110


UP = UIParams

_BB_TO_FULL_FRAME = {}
_CALIB_BB_TO_FULL = {}
_FULL_FRAME_TO_BB = {}
_INTRINSICS = {}

eon_f_qcam_frame_size = (480, 360)
tici_f_qcam_frame_size = (528, 330)

cams = [(eon_f_frame_size, eon_f_focal_length, eon_f_frame_size),
        (tici_f_frame_size, tici_f_focal_length, tici_f_frame_size),
        (eon_f_qcam_frame_size, eon_f_focal_length, eon_f_frame_size),
        (tici_f_qcam_frame_size, tici_f_focal_length, tici_f_frame_size)]
for size, focal, full_size in cams:
  sz = size[0] * size[1]
  _BB_SCALE = size[0] / 640.
  _BB_TO_FULL_FRAME[sz] = np.asarray([
      [_BB_SCALE, 0., 0.],
      [0., _BB_SCALE, 0.],
      [0., 0., 1.]])
  calib_scale = full_size[0] / 640.
  _CALIB_BB_TO_FULL[sz] = np.asarray([
      [calib_scale, 0., 0.],
      [0., calib_scale, 0.],
      [0., 0., 1.]])
  _FULL_FRAME_TO_BB[sz] = np.linalg.inv(_BB_TO_FULL_FRAME[sz])
  _FULL_FRAME_SIZE[sz] = (size[0], size[1])
  _INTRINSICS[sz] = np.array([
      [focal, 0., full_size[0] / 2.],
      [0., focal, full_size[1] / 2.],
      [0., 0., 1.]])

METER_WIDTH = 20


class Calibration:
  def __init__(self, num_px, rpy, intrinsic):
    self.intrinsic = intrinsic
    self.extrinsics_matrix = get_view_frame_from_calib_frame(rpy[0], rpy[1], rpy[2], 0.0)[:, :3]
    self.zoom = _CALIB_BB_TO_FULL[num_px][0, 0]

  def car_space_to_ff(self, x, y, z):
    car_space_projective = np.column_stack((x, y, z)).T
    ep = self.extrinsics_matrix.dot(car_space_projective)
    kep = self.intrinsic.dot(ep)
    return (kep[:-1, :] / kep[-1, :]).T

  def car_space_to_bb(self, x, y, z):
    pts = self.car_space_to_ff(x, y, z)
    return pts / self.zoom


_COLOR_CACHE: Dict[Tuple[int, int, int], Any] = {}


def find_color(lidar_surface, color):
  if color in _COLOR_CACHE:
    return _COLOR_CACHE[color]
  tcolor = 0
  ret = 255
  for x in lidar_surface.get_palette():
    if x[0:3] == color:
      ret = tcolor
      break
    tcolor += 1
  _COLOR_CACHE[color] = ret
  return ret


def to_topdown_pt(y, x):
  px, py = x * UP.lidar_zoom + UP.lidar_car_x, -y * UP.lidar_zoom + UP.lidar_car_y
  if px > 0 and py > 0 and px < UP.lidar_x and py < UP.lidar_y:
    return int(px), int(py)
  return -1, -1


def draw_path(path, color, img, calibration, top_down, lid_color=None, z_off=0):
  x, y, z = np.asarray(path.x), np.asarray(path.y), np.asarray(path.z) + z_off
  pts = calibration.car_space_to_bb(x, y, z)
  pts = np.round(pts).astype(int)

  # draw lidar path point on lidar
  # find color in 8 bit
  if lid_color is not None and top_down is not None:
    tcolor = find_color(top_down[0], lid_color)
    for i in range(len(x)):
      px, py = to_topdown_pt(x[i], y[i])
      if px != -1:
        top_down[1][px, py] = tcolor

  height, width = img.shape[:2]
  for x, y in pts:
    if 1 < x < width - 1 and 1 < y < height - 1:
      for a, b in itertools.permutations([-1, 0, -1], 2):
        img[y + a, x + b] = color


def init_plots(arr, name_to_arr_idx, plot_xlims, plot_ylims, plot_names, plot_colors, plot_styles):
  color_palette = {"r": (1, 0, 0),
                   "g": (0, 1, 0),
                   "b": (0, 0, 1),
                   "k": (0, 0, 0),
                   "y": (1, 1, 0),
                   "p": (0, 1, 1),
                   "m": (1, 0, 1)}

  dpi = 90
  fig = plt.figure(figsize=(575 / dpi, 600 / dpi), dpi=dpi)
  canvas = FigureCanvasAgg(fig)
  fig.set_facecolor((0.2, 0.2, 0.2))

  axs = []
  for pn in range(len(plot_ylims)):
    ax = fig.add_subplot(len(plot_ylims), 1, len(axs) + 1)
    ax.set_xlim(plot_xlims[pn][0], plot_xlims[pn][1])
    ax.set_ylim(plot_ylims[pn][0], plot_ylims[pn][1])
    ax.patch.set_facecolor((0.4, 0.4, 0.4))
    axs.append(ax)

  plots, idxs, plot_select = [], [], []
  for i, pl_list in enumerate(plot_names):
    for j, item in enumerate(pl_list):
      plot, = axs[i].plot(arr[:, name_to_arr_idx[item]],
                          label=item,
                          color=color_palette[plot_colors[i][j]],
                          linestyle=plot_styles[i][j])
      plots.append(plot)
      idxs.append(name_to_arr_idx[item])
      plot_select.append(i)
    axs[i].set_title(", ".join(f"{nm} ({cl})"
                               for (nm, cl) in zip(pl_list, plot_colors[i], strict=False)), fontsize=10)
    axs[i].tick_params(axis="x", colors="white")
    axs[i].tick_params(axis="y", colors="white")
    axs[i].title.set_color("white")

    if i < len(plot_ylims) - 1:
      axs[i].set_xticks([])

  canvas.draw()

  def draw_plots(arr):
    for ax in axs:
      ax.draw_artist(ax.patch)
    for i in range(len(plots)):
      plots[i].set_ydata(arr[:, idxs[i]])
      axs[plot_select[i]].draw_artist(plots[i])

    raw_data = canvas.buffer_rgba()
    plot_surface = pygame.image.frombuffer(raw_data, canvas.get_width_height(), "RGBA").convert()
    return plot_surface

  return draw_plots


def pygame_modules_have_loaded():
  return pygame.display.get_init() and pygame.font.get_init()


def plot_model(m, img, calibration, top_down):
  if calibration is None or top_down is None:
    return

  for lead in m.leadsV3:
    if lead.prob < 0.5:
      continue

    x, y = lead.x[0], lead.y[0]
    x_std = lead.xStd[0]
    x -= RADAR_TO_CAMERA

    _, py_top = to_topdown_pt(x + x_std, y)
    px, py_bottom = to_topdown_pt(x - x_std, y)
    top_down[1][int(round(px - 4)):int(round(px + 4)), py_top:py_bottom] = find_color(top_down[0], YELLOW)

  for path, prob, _ in zip(m.laneLines, m.laneLineProbs, m.laneLineStds, strict=True):
    color = (0, int(255 * prob), 0)
    draw_path(path, color, img, calibration, top_down, YELLOW)

  for edge, std in zip(m.roadEdges, m.roadEdgeStds, strict=True):
    prob = max(1 - std, 0)
    color = (int(255 * prob), 0, 0)
    draw_path(edge, color, img, calibration, top_down, RED)

  color = (255, 0, 0)
  draw_path(m.position, color, img, calibration, top_down, RED, 1.22)


def plot_lead(rs, top_down):
  for lead in [rs.leadOne, rs.leadTwo]:
    if not lead.status:
      continue

    x = lead.dRel
    px_left, py = to_topdown_pt(x, -10)
    px_right, _ = to_topdown_pt(x, 10)
    top_down[1][px_left:px_right, py] = find_color(top_down[0], RED)


def maybe_update_radar_points(lt, lid_overlay):
  ar_pts = []
  if lt is not None:
    ar_pts = {}
    for track in lt:
      ar_pts[track.trackId] = [track.dRel, track.yRel, track.vRel, track.aRel, track.oncoming, track.stationary]
  for ids, pt in ar_pts.items():
    # negative here since radar is left positive
    px, py = to_topdown_pt(pt[0], -pt[1])
    if px != -1:
      if pt[-1]:
        color = 240
      elif pt[-2]:
        color = 230
      else:
        color = 255
      if int(ids) == 1:
        lid_overlay[px - 2:px + 2, py - 10:py + 10] = 100
      else:
        lid_overlay[px - 2:px + 2, py - 2:py + 2] = color


def get_blank_lid_overlay(UP):
  lid_overlay = np.zeros((UP.lidar_x, UP.lidar_y), 'uint8')
  # Draw the car.
  lid_overlay[int(round(UP.lidar_car_x - UP.car_hwidth)):int(
    round(UP.lidar_car_x + UP.car_hwidth)), int(round(UP.lidar_car_y -
                                                      UP.car_front))] = UP.car_color
  lid_overlay[int(round(UP.lidar_car_x - UP.car_hwidth)):int(
    round(UP.lidar_car_x + UP.car_hwidth)), int(round(UP.lidar_car_y +
                                                      UP.car_back))] = UP.car_color
  lid_overlay[int(round(UP.lidar_car_x - UP.car_hwidth)), int(
    round(UP.lidar_car_y - UP.car_front)):int(round(
      UP.lidar_car_y + UP.car_back))] = UP.car_color
  lid_overlay[int(round(UP.lidar_car_x + UP.car_hwidth)), int(
    round(UP.lidar_car_y - UP.car_front)):int(round(
      UP.lidar_car_y + UP.car_back))] = UP.car_color
  return lid_overlay
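
As a quick illustration of the projection pipeline above: `Calibration` composes the calib-to-view extrinsic with the camera intrinsic, and `car_space_to_bb` rescales the result into the 640-wide debug frame. A hedged sketch (the rpy values are made up, and the key assumes tici_f_frame_size is (1928, 1208)):
```python
import numpy as np
from openpilot.tools.replay.lib.ui_helpers import _INTRINSICS, Calibration

num_px = 1928 * 1208  # tici road camera pixel count, the key into _INTRINSICS
rpy = np.array([0.0, 0.02, 0.0])  # illustrative roll/pitch/yaw, normally from liveCalibration
calib = Calibration(num_px, rpy, _INTRINSICS[num_px])

# project a car-frame point 30 m ahead on the ground into debug-frame pixels
print(calib.car_space_to_bb(np.array([30.0]), np.array([0.0]), np.array([0.0])))
```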

tools/replay/tests/test_replay.cc Normal file

@@ -0,0 +1,220 @@
#include <chrono>
#include <thread>

#include <QDebug>
#include <QEventLoop>

#include "catch2/catch.hpp"
#include "common/util.h"
#include "tools/replay/replay.h"
#include "tools/replay/util.h"

const std::string TEST_RLOG_URL = "https://commadataci.blob.core.windows.net/openpilotci/0c94aa1e1296d7c6/2021-05-05--19-48-37/0/rlog.bz2";
const std::string TEST_RLOG_CHECKSUM = "5b966d4bb21a100a8c4e59195faeb741b975ccbe268211765efd1763d892bfb3";

bool download_to_file(const std::string &url, const std::string &local_file, int chunk_size = 5 * 1024 * 1024, int retries = 3) {
  do {
    if (httpDownload(url, local_file, chunk_size)) {
      return true;
    }
    std::this_thread::sleep_for(std::chrono::milliseconds(500));
  } while (--retries >= 0);
  return false;
}

TEST_CASE("httpMultiPartDownload") {
  char filename[] = "/tmp/XXXXXX";
  close(mkstemp(filename));

  const size_t chunk_size = 5 * 1024 * 1024;
  std::string content;
  SECTION("download to file") {
    REQUIRE(download_to_file(TEST_RLOG_URL, filename, chunk_size));
    content = util::read_file(filename);
  }
  SECTION("download to buffer") {
    for (int i = 0; i < 3 && content.empty(); ++i) {
      content = httpGet(TEST_RLOG_URL, chunk_size);
      std::this_thread::sleep_for(std::chrono::milliseconds(500));
    }
    REQUIRE(!content.empty());
  }
  REQUIRE(content.size() == 9112651);
  REQUIRE(sha256(content) == TEST_RLOG_CHECKSUM);
}

TEST_CASE("FileReader") {
  auto enable_local_cache = GENERATE(true, false);
  std::string cache_file = cacheFilePath(TEST_RLOG_URL);
  system(("rm " + cache_file + " -f").c_str());

  FileReader reader(enable_local_cache);
  std::string content = reader.read(TEST_RLOG_URL);
  REQUIRE(sha256(content) == TEST_RLOG_CHECKSUM);
  if (enable_local_cache) {
    REQUIRE(sha256(util::read_file(cache_file)) == TEST_RLOG_CHECKSUM);
  } else {
    REQUIRE(util::file_exists(cache_file) == false);
  }
}

TEST_CASE("LogReader") {
  SECTION("corrupt log") {
    FileReader reader(true);
    std::string corrupt_content = reader.read(TEST_RLOG_URL);
    corrupt_content.resize(corrupt_content.length() / 2);
    corrupt_content = decompressBZ2(corrupt_content);
    LogReader log;
    REQUIRE(log.load((std::byte *)corrupt_content.data(), corrupt_content.size()));
    REQUIRE(log.events.size() > 0);
  }
}

void read_segment(int n, const SegmentFile &segment_file, uint32_t flags) {
  QEventLoop loop;
  Segment segment(n, segment_file, flags);
  QObject::connect(&segment, &Segment::loadFinished, [&]() {
    REQUIRE(segment.isLoaded() == true);
    REQUIRE(segment.log != nullptr);
    REQUIRE(segment.frames[RoadCam] != nullptr);
    if (flags & REPLAY_FLAG_DCAM) {
      REQUIRE(segment.frames[DriverCam] != nullptr);
    }
    if (flags & REPLAY_FLAG_ECAM) {
      REQUIRE(segment.frames[WideRoadCam] != nullptr);
    }

    // test LogReader & FrameReader
    REQUIRE(segment.log->events.size() > 0);
    REQUIRE(std::is_sorted(segment.log->events.begin(), segment.log->events.end(), Event::lessThan()));

    for (auto cam : ALL_CAMERAS) {
      auto &fr = segment.frames[cam];
      if (!fr) continue;

      if (cam == RoadCam || cam == WideRoadCam) {
        REQUIRE(fr->getFrameCount() == 1200);
      }
      auto [nv12_width, nv12_height, nv12_buffer_size] = get_nv12_info(fr->width, fr->height);
      VisionBuf buf;
      buf.allocate(nv12_buffer_size);
      buf.init_yuv(fr->width, fr->height, nv12_width, nv12_width * nv12_height);
      // sequentially get 100 frames
      for (int i = 0; i < 100; ++i) {
        REQUIRE(fr->get(i, &buf));
      }
    }

    loop.quit();
  });
  loop.exec();
}

std::string download_demo_route() {
  static std::string data_dir;

  if (data_dir == "") {
    char tmp_path[] = "/tmp/root_XXXXXX";
    data_dir = mkdtemp(tmp_path);

    Route remote_route(DEMO_ROUTE);
    assert(remote_route.load());

    // Create a local route from remote for testing
    const std::string route_name = DEMO_ROUTE.mid(17).toStdString();
    for (int i = 0; i < 2; ++i) {
      std::string log_path = util::string_format("%s/%s--%d/", data_dir.c_str(), route_name.c_str(), i);
      util::create_directories(log_path, 0755);
      REQUIRE(download_to_file(remote_route.at(i).rlog.toStdString(), log_path + "rlog.bz2"));
      REQUIRE(download_to_file(remote_route.at(i).driver_cam.toStdString(), log_path + "dcamera.hevc"));
      REQUIRE(download_to_file(remote_route.at(i).wide_road_cam.toStdString(), log_path + "ecamera.hevc"));
      REQUIRE(download_to_file(remote_route.at(i).qcamera.toStdString(), log_path + "qcamera.ts"));
    }
  }

  return data_dir;
}

TEST_CASE("Local route") {
  std::string data_dir = download_demo_route();
  auto flags = GENERATE(REPLAY_FLAG_DCAM | REPLAY_FLAG_ECAM, REPLAY_FLAG_QCAMERA);
  Route route(DEMO_ROUTE, QString::fromStdString(data_dir));
  REQUIRE(route.load());
  REQUIRE(route.segments().size() == 2);
  for (int i = 0; i < route.segments().size(); ++i) {
    read_segment(i, route.at(i), flags);
  }
}

TEST_CASE("Remote route") {
  auto flags = GENERATE(REPLAY_FLAG_DCAM | REPLAY_FLAG_ECAM, REPLAY_FLAG_QCAMERA);
  Route route(DEMO_ROUTE);
  REQUIRE(route.load());
  REQUIRE(route.segments().size() == 13);
  for (int i = 0; i < 2; ++i) {
    read_segment(i, route.at(i), flags);
  }
}

// helper class for unit tests
class TestReplay : public Replay {
public:
  TestReplay(const QString &route, uint32_t flags = REPLAY_FLAG_NO_FILE_CACHE | REPLAY_FLAG_NO_VIPC) : Replay(route, {}, {}, nullptr, flags) {}
  void test_seek();
  void testSeekTo(int seek_to);
};

void TestReplay::testSeekTo(int seek_to) {
  seekTo(seek_to, false);

  while (true) {
    std::unique_lock lk(stream_lock_);
    stream_cv_.wait(lk, [=]() { return events_updated_ == true; });
    events_updated_ = false;
    if (cur_mono_time_ != route_start_ts_ + seek_to * 1e9) {
      // woken up by the previous merge; skip it.
      continue;
    }
    Event cur_event(cereal::Event::Which::INIT_DATA, cur_mono_time_);
    auto eit = std::upper_bound(events_->begin(), events_->end(), &cur_event, Event::lessThan());
    if (eit == events_->end()) {
      qDebug() << "waiting for events...";
      continue;
    }

    REQUIRE(std::is_sorted(events_->begin(), events_->end(), Event::lessThan()));
    const int seek_to_segment = seek_to / 60;
    const int event_seconds = ((*eit)->mono_time - route_start_ts_) / 1e9;
    current_segment_ = event_seconds / 60;
    INFO("seek to [" << seek_to << "s segment " << seek_to_segment << "], events [" << event_seconds << "s segment " << current_segment_ << "]");
    REQUIRE(event_seconds >= seek_to);
    if (event_seconds > seek_to) {
      auto it = segments_.lower_bound(seek_to_segment);
      REQUIRE(it->first == current_segment_);
    }
    break;
  }
}

void TestReplay::test_seek() {
  // create a dummy stream thread
  stream_thread_ = new QThread(this);
  QEventLoop loop;
  std::thread thread = std::thread([&]() {
    for (int i = 0; i < 10; ++i) {
      testSeekTo(util::random_int(0, 2 * 60));
    }
    loop.quit();
  });
  loop.exec();
  thread.join();
}

TEST_CASE("Replay") {
  TestReplay replay(DEMO_ROUTE);
  REQUIRE(replay.load());
  replay.test_seek();
}
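
These Catch2 tests only build when scons is run with `--extras` (see the SConscript above); the resulting binary can be run directly, and Catch2 accepts a test name to run a single TEST_CASE:
```bash
scons -j$(nproc) --extras tools/replay
tools/replay/tests/test_replay               # run all tests (downloads test data)
tools/replay/tests/test_replay "FileReader"  # run one TEST_CASE by name
```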

tools/replay/tests/test_runner.cc Normal file

@@ -0,0 +1,10 @@
#define CATCH_CONFIG_RUNNER
#include "catch2/catch.hpp"

#include <QCoreApplication>

int main(int argc, char **argv) {
  // unit tests for Qt
  QCoreApplication app(argc, argv);
  const int res = Catch::Session().run(argc, argv);
  return (res < 0xff ? res : 0xff);
}

tools/replay/ui.py Normal file

@@ -0,0 +1,228 @@
#!/usr/bin/env python3
import argparse
import os
import sys

import cv2
import numpy as np
import pygame

import cereal.messaging as messaging
from openpilot.common.numpy_fast import clip
from openpilot.common.basedir import BASEDIR
from openpilot.tools.replay.lib.ui_helpers import (_BB_TO_FULL_FRAME, UP,
                                                   _INTRINSICS, BLACK, GREEN,
                                                   YELLOW, Calibration,
                                                   get_blank_lid_overlay, init_plots,
                                                   maybe_update_radar_points, plot_lead,
                                                   plot_model,
                                                   pygame_modules_have_loaded)
from cereal.visionipc import VisionIpcClient, VisionStreamType

os.environ['BASEDIR'] = BASEDIR

ANGLE_SCALE = 5.0


def ui_thread(addr):
  cv2.setNumThreads(1)
  pygame.init()
  pygame.font.init()
  assert pygame_modules_have_loaded()

  disp_info = pygame.display.Info()
  max_height = disp_info.current_h

  hor_mode = os.getenv("HORIZONTAL") is not None
  hor_mode = True if max_height < 960 + 300 else hor_mode

  if hor_mode:
    size = (640 + 384 + 640, 960)
    write_x = 5
    write_y = 680
  else:
    size = (640 + 384, 960 + 300)
    write_x = 645
    write_y = 970

  pygame.display.set_caption("openpilot debug UI")
  screen = pygame.display.set_mode(size, pygame.DOUBLEBUF)

  alert1_font = pygame.font.SysFont("arial", 30)
  alert2_font = pygame.font.SysFont("arial", 20)
  info_font = pygame.font.SysFont("arial", 15)

  camera_surface = pygame.surface.Surface((640, 480), 0, 24).convert()
  top_down_surface = pygame.surface.Surface((UP.lidar_x, UP.lidar_y), 0, 8)

  sm = messaging.SubMaster(['carState', 'longitudinalPlan', 'carControl', 'radarState', 'liveCalibration', 'controlsState',
                            'liveTracks', 'modelV2', 'liveParameters', 'lateralPlan'], addr=addr)

  img = np.zeros((480, 640, 3), dtype='uint8')
  imgff = None
  num_px = 0
  calibration = None

  lid_overlay_blank = get_blank_lid_overlay(UP)

  # plots
  name_to_arr_idx = {"gas": 0,
                     "computer_gas": 1,
                     "user_brake": 2,
                     "computer_brake": 3,
                     "v_ego": 4,
                     "v_pid": 5,
                     "angle_steers_des": 6,
                     "angle_steers": 7,
                     "angle_steers_k": 8,
                     "steer_torque": 9,
                     "v_override": 10,
                     "v_cruise": 11,
                     "a_ego": 12,
                     "a_target": 13}

  plot_arr = np.zeros((100, len(name_to_arr_idx.values())))

  plot_xlims = [(0, plot_arr.shape[0]), (0, plot_arr.shape[0]), (0, plot_arr.shape[0]), (0, plot_arr.shape[0])]
  plot_ylims = [(-0.1, 1.1), (-ANGLE_SCALE, ANGLE_SCALE), (0., 75.), (-3.0, 2.0)]
  plot_names = [["gas", "computer_gas", "user_brake", "computer_brake"],
                ["angle_steers", "angle_steers_des", "angle_steers_k", "steer_torque"],
                ["v_ego", "v_override", "v_pid", "v_cruise"],
                ["a_ego", "a_target"]]
  plot_colors = [["b", "b", "g", "r", "y"],
                 ["b", "g", "y", "r"],
                 ["b", "g", "r", "y"],
                 ["b", "r"]]
  plot_styles = [["-", "-", "-", "-", "-"],
                 ["-", "-", "-", "-"],
                 ["-", "-", "-", "-"],
                 ["-", "-"]]

  draw_plots = init_plots(plot_arr, name_to_arr_idx, plot_xlims, plot_ylims, plot_names, plot_colors, plot_styles)

  vipc_client = VisionIpcClient("camerad", VisionStreamType.VISION_STREAM_ROAD, True)

  while 1:
    list(pygame.event.get())

    screen.fill((64, 64, 64))
    lid_overlay = lid_overlay_blank.copy()
    top_down = top_down_surface, lid_overlay

    # ***** frame *****
    if not vipc_client.is_connected():
      vipc_client.connect(True)

    yuv_img_raw = vipc_client.recv()
    if yuv_img_raw is None or not yuv_img_raw.data.any():
      continue

    imgff = np.frombuffer(yuv_img_raw.data, dtype=np.uint8).reshape((len(yuv_img_raw.data) // vipc_client.stride, vipc_client.stride))
    num_px = vipc_client.width * vipc_client.height
    bgr = cv2.cvtColor(imgff[:vipc_client.height * 3 // 2, :vipc_client.width], cv2.COLOR_YUV2RGB_NV12)

    zoom_matrix = _BB_TO_FULL_FRAME[num_px]
    cv2.warpAffine(bgr, zoom_matrix[:2], (img.shape[1], img.shape[0]), dst=img, flags=cv2.WARP_INVERSE_MAP)

    intrinsic_matrix = _INTRINSICS[num_px]

    sm.update(0)

    w = sm['controlsState'].lateralControlState.which()
    if w == 'lqrStateDEPRECATED':
      angle_steers_k = sm['controlsState'].lateralControlState.lqrStateDEPRECATED.steeringAngleDeg
    elif w == 'indiState':
      angle_steers_k = sm['controlsState'].lateralControlState.indiState.steeringAngleDeg
    else:
      angle_steers_k = np.inf

    plot_arr[:-1] = plot_arr[1:]
    plot_arr[-1, name_to_arr_idx['angle_steers']] = sm['carState'].steeringAngleDeg
    plot_arr[-1, name_to_arr_idx['angle_steers_des']] = sm['carControl'].actuators.steeringAngleDeg
    plot_arr[-1, name_to_arr_idx['angle_steers_k']] = angle_steers_k
    plot_arr[-1, name_to_arr_idx['gas']] = sm['carState'].gas
    # TODO gas is deprecated
    plot_arr[-1, name_to_arr_idx['computer_gas']] = clip(sm['carControl'].actuators.accel / 4.0, 0.0, 1.0)
    plot_arr[-1, name_to_arr_idx['user_brake']] = sm['carState'].brake
    plot_arr[-1, name_to_arr_idx['steer_torque']] = sm['carControl'].actuators.steer * ANGLE_SCALE
    # TODO brake is deprecated
    plot_arr[-1, name_to_arr_idx['computer_brake']] = clip(-sm['carControl'].actuators.accel / 4.0, 0.0, 1.0)
    plot_arr[-1, name_to_arr_idx['v_ego']] = sm['carState'].vEgo
    plot_arr[-1, name_to_arr_idx['v_pid']] = sm['controlsState'].vPid
    plot_arr[-1, name_to_arr_idx['v_cruise']] = sm['carState'].cruiseState.speed
    plot_arr[-1, name_to_arr_idx['a_ego']] = sm['carState'].aEgo

    if len(sm['longitudinalPlan'].accels):
      plot_arr[-1, name_to_arr_idx['a_target']] = sm['longitudinalPlan'].accels[0]

    if sm.rcv_frame['modelV2']:
      plot_model(sm['modelV2'], img, calibration, top_down)

    if sm.rcv_frame['radarState']:
      plot_lead(sm['radarState'], top_down)

    # draw all radar points
    maybe_update_radar_points(sm['liveTracks'], top_down[1])

    if sm.updated['liveCalibration'] and num_px:
      rpyCalib = np.asarray(sm['liveCalibration'].rpyCalib)
      calibration = Calibration(num_px, rpyCalib, intrinsic_matrix)

    # *** blits ***
    pygame.surfarray.blit_array(camera_surface, img.swapaxes(0, 1))
    screen.blit(camera_surface, (0, 0))

    # display alerts
    alert_line1 = alert1_font.render(sm['controlsState'].alertText1, True, (255, 0, 0))
    alert_line2 = alert2_font.render(sm['controlsState'].alertText2, True, (255, 0, 0))
    screen.blit(alert_line1, (180, 150))
    screen.blit(alert_line2, (180, 190))

    if hor_mode:
      screen.blit(draw_plots(plot_arr), (640 + 384, 0))
    else:
      screen.blit(draw_plots(plot_arr), (0, 600))

    pygame.surfarray.blit_array(*top_down)
    screen.blit(top_down[0], (640, 0))

    SPACING = 25
    lines = [
      info_font.render("ENABLED", True, GREEN if sm['controlsState'].enabled else BLACK),
      info_font.render("SPEED: " + str(round(sm['carState'].vEgo, 1)) + " m/s", True, YELLOW),
      info_font.render("LONG CONTROL STATE: " + str(sm['controlsState'].longControlState), True, YELLOW),
      info_font.render("LONG MPC SOURCE: " + str(sm['longitudinalPlan'].longitudinalPlanSource), True, YELLOW),
      None,
      info_font.render("ANGLE OFFSET (AVG): " + str(round(sm['liveParameters'].angleOffsetAverageDeg, 2)) + " deg", True, YELLOW),
      info_font.render("ANGLE OFFSET (INSTANT): " + str(round(sm['liveParameters'].angleOffsetDeg, 2)) + " deg", True, YELLOW),
      info_font.render("STIFFNESS: " + str(round(sm['liveParameters'].stiffnessFactor * 100., 2)) + " %", True, YELLOW),
      info_font.render("STEER RATIO: " + str(round(sm['liveParameters'].steerRatio, 2)), True, YELLOW)
    ]
    for i, line in enumerate(lines):
      if line is not None:
        screen.blit(line, (write_x, write_y + i * SPACING))

    # this takes time...vsync or something
    pygame.display.flip()


def get_arg_parser():
  parser = argparse.ArgumentParser(
    description="Show replay data in a UI.",
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)
  parser.add_argument("ip_address", nargs="?", default="127.0.0.1",
                      help="The ip address on which to receive zmq messages.")
  parser.add_argument("--frame-address", default=None,
                      help="The frame address (fully qualified ZMQ endpoint for frames) on which to receive zmq messages.")
  return parser


if __name__ == "__main__":
  args = get_arg_parser().parse_args(sys.argv[1:])

  if args.ip_address != "127.0.0.1":
    os.environ["ZMQ"] = "1"
    messaging.context = messaging.Context()

  ui_thread(args.ip_address)
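
A typical workflow pairs this visualizer with a running replay. By default it subscribes on 127.0.0.1; passing another address flips messaging over to ZMQ, per the `__main__` block above:
```bash
# terminal 1: start a replay
tools/replay/replay --demo
# terminal 2: start the debug UI
python tools/replay/ui.py
# or point it at a device on your network (address is illustrative)
python tools/replay/ui.py 192.168.1.64
```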


@@ -0,0 +1,108 @@
#!/usr/bin/env python3
import argparse
import bisect
import select
import sys
import termios
import time
import tty
from collections import defaultdict

import cereal.messaging as messaging
from openpilot.tools.lib.framereader import FrameReader
from openpilot.tools.lib.logreader import LogReader
from openpilot.selfdrive.test.openpilotci import get_url

IGNORE = ['initData', 'sentinel']


def input_ready():
  return select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], [])


def replay(route, segment, loop):
  route = route.replace('|', '/')

  lr = LogReader(get_url(route, segment))
  fr = FrameReader(get_url(route, segment, "fcamera"), readahead=True)

  # Build mapping from frameId to segmentId from roadEncodeIdx, type == fullHEVC
  msgs = [m for m in lr if m.which() not in IGNORE]
  msgs = sorted(msgs, key=lambda m: m.logMonoTime)
  times = [m.logMonoTime for m in msgs]
  frame_idx = {m.roadEncodeIdx.frameId: m.roadEncodeIdx.segmentId for m in msgs if m.which() == 'roadEncodeIdx' and m.roadEncodeIdx.type == 'fullHEVC'}

  socks = {}
  lag = 0.0
  i = 0
  max_i = len(msgs) - 2

  while True:
    msg = msgs[i].as_builder()
    next_msg = msgs[i + 1]

    start_time = time.time()
    w = msg.which()

    if w == 'roadCameraState':
      try:
        img = fr.get(frame_idx[msg.roadCameraState.frameId], pix_fmt="rgb24")
        img = img[0][:, :, ::-1]  # Convert RGB to BGR, which is what the camera outputs
        msg.roadCameraState.image = img.flatten().tobytes()
      except (KeyError, ValueError):
        pass

    if w not in socks:
      socks[w] = messaging.pub_sock(w)

    try:
      if socks[w]:
        socks[w].send(msg.to_bytes())
    except messaging.messaging_pyx.MultiplePublishersError:
      socks[w] = None

    lag += (next_msg.logMonoTime - msg.logMonoTime) / 1e9
    lag -= time.time() - start_time

    dt = max(lag, 0.0)
    lag -= dt
    time.sleep(dt)

    if lag < -1.0 and i % 1000 == 0:
      print(f"{-lag:.2f} s behind")

    if input_ready():
      key = sys.stdin.read(1)

      # Handle pause
      if key == " ":
        while True:
          if input_ready() and sys.stdin.read(1) == " ":
            break
          time.sleep(0.01)

      # Handle seek
      dt = defaultdict(int, s=10, S=-10)[key]
      new_time = msgs[i].logMonoTime + dt * 1e9
      i = bisect.bisect_left(times, new_time)

    i = (i + 1) % max_i if loop else min(i + 1, max_i)


if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  parser.add_argument("--loop", action='store_true')
  parser.add_argument("route")
  parser.add_argument("segment")
  args = parser.parse_args()

  orig_settings = termios.tcgetattr(sys.stdin)
  tty.setcbreak(sys.stdin)

  try:
    replay(args.route, args.segment, args.loop)
    termios.tcsetattr(sys.stdin, termios.TCSADRAIN, orig_settings)
  except Exception:
    termios.tcsetattr(sys.stdin, termios.TCSADRAIN, orig_settings)
    raise
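
Per the input handling above, playback is controlled from the terminal while the script runs: space toggles pause (press it again to resume), `s` seeks forward 10 seconds, `S` seeks back 10 seconds, and `--loop` wraps around instead of stopping at the end of the segment.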