openpilot v0.9.6 release

date: 2024-01-12T10:13:37
master commit: ba792d576a49a0899b88a753fa1c52956bedf9e6
FrogAi committed 2024-01-12 22:39:28 -07:00 (commit 08e9fb1edc)
1881 changed files with 653708 additions and 0 deletions

103
tools/replay/camera.cc Normal file

@@ -0,0 +1,103 @@
#include "tools/replay/camera.h"
#include <cassert>
#include <tuple>
#include "third_party/linux/include/msm_media_info.h"
#include "tools/replay/util.h"
std::tuple<size_t, size_t, size_t> get_nv12_info(int width, int height) {
int nv12_width = VENUS_Y_STRIDE(COLOR_FMT_NV12, width);
int nv12_height = VENUS_Y_SCANLINES(COLOR_FMT_NV12, height);
assert(nv12_width == VENUS_UV_STRIDE(COLOR_FMT_NV12, width));
assert(nv12_height / 2 == VENUS_UV_SCANLINES(COLOR_FMT_NV12, height));
size_t nv12_buffer_size = 2346 * nv12_width; // comes from v4l2_format.fmt.pix_mp.plane_fmt[0].sizeimage
return {nv12_width, nv12_height, nv12_buffer_size};
}
CameraServer::CameraServer(std::pair<int, int> camera_size[MAX_CAMERAS]) {
for (int i = 0; i < MAX_CAMERAS; ++i) {
std::tie(cameras_[i].width, cameras_[i].height) = camera_size[i];
}
startVipcServer();
}
CameraServer::~CameraServer() {
for (auto &cam : cameras_) {
if (cam.thread.joinable()) {
cam.queue.push({});
cam.thread.join();
}
}
vipc_server_.reset(nullptr);
}
void CameraServer::startVipcServer() {
vipc_server_.reset(new VisionIpcServer("camerad"));
for (auto &cam : cameras_) {
if (cam.width > 0 && cam.height > 0) {
rInfo("camera[%d] frame size %dx%d", cam.type, cam.width, cam.height);
auto [nv12_width, nv12_height, nv12_buffer_size] = get_nv12_info(cam.width, cam.height);
vipc_server_->create_buffers_with_sizes(cam.stream_type, YUV_BUFFER_COUNT, false, cam.width, cam.height,
nv12_buffer_size, nv12_width, nv12_width * nv12_height);
if (!cam.thread.joinable()) {
cam.thread = std::thread(&CameraServer::cameraThread, this, std::ref(cam));
}
}
}
vipc_server_->start_listener();
}
void CameraServer::cameraThread(Camera &cam) {
auto read_frame = [&](FrameReader *fr, int frame_id) {
VisionBuf *yuv_buf = vipc_server_->get_buffer(cam.stream_type);
assert(yuv_buf);
bool ret = fr->get(frame_id, yuv_buf);
return ret ? yuv_buf : nullptr;
};
while (true) {
const auto [fr, eidx] = cam.queue.pop();
if (!fr) break;
const int id = eidx.getSegmentId();
bool prefetched = (id == cam.cached_id && eidx.getSegmentNum() == cam.cached_seg);
auto yuv = prefetched ? cam.cached_buf : read_frame(fr, id);
if (yuv) {
VisionIpcBufExtra extra = {
.frame_id = eidx.getFrameId(),
.timestamp_sof = eidx.getTimestampSof(),
.timestamp_eof = eidx.getTimestampEof(),
};
yuv->set_frame_id(eidx.getFrameId());
vipc_server_->send(yuv, &extra);
} else {
rError("camera[%d] failed to get frame: %lu", cam.type, eidx.getSegmentId());
}
cam.cached_id = id + 1;
cam.cached_seg = eidx.getSegmentNum();
cam.cached_buf = read_frame(fr, cam.cached_id);
--publishing_;
}
}
void CameraServer::pushFrame(CameraType type, FrameReader *fr, const cereal::EncodeIndex::Reader &eidx) {
auto &cam = cameras_[type];
if (cam.width != fr->width || cam.height != fr->height) {
cam.width = fr->width;
cam.height = fr->height;
waitForSent();
startVipcServer();
}
++publishing_;
cam.queue.push({fr, eidx});
}
void CameraServer::waitForSent() {
while (publishing_ > 0) {
std::this_thread::yield();
}
}
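A minimal usage sketch (not part of the commit), assuming the replay headers above are available: Replay constructs a CameraServer with per-camera sizes, queues decoded frames with pushFrame(), and drains them with waitForSent(). The helper name serve_road_frames and its indices argument are hypothetical.

// Hypothetical helper, for illustration only.
#include <utility>
#include <vector>
#include "tools/replay/camera.h"

void serve_road_frames(FrameReader *fr, const std::vector<cereal::EncodeIndex::Reader> &indices) {
  std::pair<int, int> sizes[MAX_CAMERAS] = {};
  sizes[RoadCam] = {fr->width, fr->height};  // cameras left at 0x0 get no buffers or thread
  CameraServer server(sizes);                // starts the "camerad" VisionIpcServer
  for (const auto &eidx : indices) {
    server.pushFrame(RoadCam, fr, eidx);     // queued; decoded and sent on the camera thread
  }
  server.waitForSent();                      // block until every queued frame was published
}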

45
tools/replay/camera.h Normal file

@@ -0,0 +1,45 @@
#pragma once
#include <unistd.h>
#include <memory>
#include <tuple>
#include <utility>
#include "cereal/visionipc/visionipc_server.h"
#include "common/queue.h"
#include "tools/replay/framereader.h"
#include "tools/replay/logreader.h"
std::tuple<size_t, size_t, size_t> get_nv12_info(int width, int height);
class CameraServer {
public:
CameraServer(std::pair<int, int> camera_size[MAX_CAMERAS] = nullptr);
~CameraServer();
void pushFrame(CameraType type, FrameReader* fr, const cereal::EncodeIndex::Reader& eidx);
void waitForSent();
protected:
struct Camera {
CameraType type;
VisionStreamType stream_type;
int width;
int height;
std::thread thread;
SafeQueue<std::pair<FrameReader*, const cereal::EncodeIndex::Reader>> queue;
int cached_id = -1;
int cached_seg = -1;
VisionBuf *cached_buf = nullptr;
};
void startVipcServer();
void cameraThread(Camera &cam);
Camera cameras_[MAX_CAMERAS] = {
{.type = RoadCam, .stream_type = VISION_STREAM_ROAD},
{.type = DriverCam, .stream_type = VISION_STREAM_DRIVER},
{.type = WideRoadCam, .stream_type = VISION_STREAM_WIDE_ROAD},
};
std::atomic<int> publishing_ = 0;
std::unique_ptr<VisionIpcServer> vipc_server_;
};

374
tools/replay/consoleui.cc Normal file

@@ -0,0 +1,374 @@
#include "tools/replay/consoleui.h"
#include <initializer_list>
#include <string>
#include <tuple>
#include <utility>
#include <QApplication>
#include "common/util.h"
#include "common/version.h"
namespace {
const int BORDER_SIZE = 3;
const std::initializer_list<std::pair<std::string, std::string>> keyboard_shortcuts[] = {
{
{"s", "+10s"},
{"shift+s", "-10s"},
{"m", "+60s"},
{"shift+m", "-60s"},
{"space", "Pause/Resume"},
{"e", "Next Engagement"},
{"d", "Next Disengagement"},
{"t", "Next User Tag"},
{"i", "Next Info"},
{"w", "Next Warning"},
{"c", "Next Critical"},
},
{
{"enter", "Enter seek request"},
{"+/-", "Playback speed"},
{"q", "Exit"},
},
};
enum Color {
Default,
Debug,
Yellow,
Green,
Red,
Cyan,
BrightWhite,
Engaged,
Disengaged,
};
void add_str(WINDOW *w, const char *str, Color color = Color::Default, bool bold = false) {
if (color != Color::Default) wattron(w, COLOR_PAIR(color));
if (bold) wattron(w, A_BOLD);
waddstr(w, str);
if (bold) wattroff(w, A_BOLD);
if (color != Color::Default) wattroff(w, COLOR_PAIR(color));
}
} // namespace
ConsoleUI::ConsoleUI(Replay *replay, QObject *parent) : replay(replay), sm({"carState", "liveParameters"}), QObject(parent) {
// Initialize curses
initscr();
clear();
curs_set(false);
cbreak(); // Line buffering disabled. pass on everything
noecho();
keypad(stdscr, true);
nodelay(stdscr, true); // non-blocking getchar()
// Initialize all the colors. https://www.ditig.com/256-colors-cheat-sheet
start_color();
init_pair(Color::Debug, 246, COLOR_BLACK); // #949494
init_pair(Color::Yellow, 184, COLOR_BLACK);
init_pair(Color::Red, COLOR_RED, COLOR_BLACK);
init_pair(Color::Cyan, COLOR_CYAN, COLOR_BLACK);
init_pair(Color::BrightWhite, 15, COLOR_BLACK);
init_pair(Color::Disengaged, COLOR_BLUE, COLOR_BLUE);
init_pair(Color::Engaged, 28, 28);
init_pair(Color::Green, 34, COLOR_BLACK);
initWindows();
qRegisterMetaType<uint64_t>("uint64_t");
qRegisterMetaType<ReplyMsgType>("ReplyMsgType");
installMessageHandler([this](ReplyMsgType type, const std::string msg) {
emit logMessageSignal(type, QString::fromStdString(msg));
});
installDownloadProgressHandler([this](uint64_t cur, uint64_t total, bool success) {
emit updateProgressBarSignal(cur, total, success);
});
QObject::connect(replay, &Replay::streamStarted, this, &ConsoleUI::updateSummary);
QObject::connect(&notifier, SIGNAL(activated(int)), SLOT(readyRead()));
QObject::connect(this, &ConsoleUI::updateProgressBarSignal, this, &ConsoleUI::updateProgressBar);
QObject::connect(this, &ConsoleUI::logMessageSignal, this, &ConsoleUI::logMessage);
sm_timer.callOnTimeout(this, &ConsoleUI::updateStatus);
sm_timer.start(100);
getch_timer.start(1000, this);
readyRead();
}
ConsoleUI::~ConsoleUI() {
endwin();
}
void ConsoleUI::initWindows() {
getmaxyx(stdscr, max_height, max_width);
w.fill(nullptr);
w[Win::Title] = newwin(1, max_width, 0, 0);
w[Win::Stats] = newwin(2, max_width - 2 * BORDER_SIZE, 2, BORDER_SIZE);
w[Win::Timeline] = newwin(4, max_width - 2 * BORDER_SIZE, 5, BORDER_SIZE);
w[Win::TimelineDesc] = newwin(1, 100, 10, BORDER_SIZE);
w[Win::CarState] = newwin(3, 100, 12, BORDER_SIZE);
w[Win::DownloadBar] = newwin(1, 100, 16, BORDER_SIZE);
if (int log_height = max_height - 27; log_height > 4) {
w[Win::LogBorder] = newwin(log_height, max_width - 2 * (BORDER_SIZE - 1), 17, BORDER_SIZE - 1);
box(w[Win::LogBorder], 0, 0);
w[Win::Log] = newwin(log_height - 2, max_width - 2 * BORDER_SIZE, 18, BORDER_SIZE);
scrollok(w[Win::Log], true);
}
w[Win::Help] = newwin(5, max_width - (2 * BORDER_SIZE), max_height - 6, BORDER_SIZE);
// set the title bar
wbkgd(w[Win::Title], A_REVERSE);
mvwprintw(w[Win::Title], 0, 3, "openpilot replay %s", COMMA_VERSION);
// show windows on the real screen
refresh();
displayTimelineDesc();
displayHelp();
updateSummary();
updateTimeline();
for (auto win : w) {
if (win) wrefresh(win);
}
}
void ConsoleUI::timerEvent(QTimerEvent *ev) {
if (ev->timerId() != getch_timer.timerId()) return;
if (is_term_resized(max_height, max_width)) {
for (auto win : w) {
if (win) delwin(win);
}
endwin();
clear();
refresh();
initWindows();
rWarning("resize term %dx%d", max_height, max_width);
}
updateTimeline();
}
void ConsoleUI::updateStatus() {
auto write_item = [this](int y, int x, const char *key, const std::string &value, const std::string &unit,
bool bold = false, Color color = Color::BrightWhite) {
auto win = w[Win::CarState];
wmove(win, y, x);
add_str(win, key);
add_str(win, value.c_str(), color, bold);
add_str(win, unit.c_str());
};
static const std::pair<const char *, Color> status_text[] = {
{"loading...", Color::Red},
{"playing", Color::Green},
{"paused...", Color::Yellow},
};
sm.update(0);
if (status != Status::Paused) {
auto events = replay->events();
uint64_t current_mono_time = replay->routeStartTime() + replay->currentSeconds() * 1e9;
bool playing = !events->empty() && events->back()->mono_time > current_mono_time;
status = playing ? Status::Playing : Status::Waiting;
}
auto [status_str, status_color] = status_text[status];
write_item(0, 0, "STATUS: ", status_str, " ", false, status_color);
std::string current_segment = " - " + std::to_string((int)(replay->currentSeconds() / 60));
write_item(0, 25, "TIME: ", replay->currentDateTime().toString("ddd MMMM dd hh:mm:ss").toStdString(), current_segment, true);
auto p = sm["liveParameters"].getLiveParameters();
write_item(1, 0, "STIFFNESS: ", util::string_format("%.2f %%", p.getStiffnessFactor() * 100), " ");
write_item(1, 25, "SPEED: ", util::string_format("%.2f", sm["carState"].getCarState().getVEgo()), " m/s");
write_item(2, 0, "STEER RATIO: ", util::string_format("%.2f", p.getSteerRatio()), "");
auto angle_offsets = util::string_format("%.2f|%.2f", p.getAngleOffsetAverageDeg(), p.getAngleOffsetDeg());
write_item(2, 25, "ANGLE OFFSET(AVG|INSTANT): ", angle_offsets, " deg");
wrefresh(w[Win::CarState]);
}
void ConsoleUI::displayHelp() {
for (int i = 0; i < std::size(keyboard_shortcuts); ++i) {
wmove(w[Win::Help], i * 2, 0);
for (auto &[key, desc] : keyboard_shortcuts[i]) {
wattron(w[Win::Help], A_REVERSE);
waddstr(w[Win::Help], (' ' + key + ' ').c_str());
wattroff(w[Win::Help], A_REVERSE);
waddstr(w[Win::Help], (' ' + desc + ' ').c_str());
}
}
wrefresh(w[Win::Help]);
}
void ConsoleUI::displayTimelineDesc() {
std::tuple<Color, const char *, bool> indicators[]{
{Color::Engaged, " Engaged ", false},
{Color::Disengaged, " Disengaged ", false},
{Color::Green, " Info ", true},
{Color::Yellow, " Warning ", true},
{Color::Red, " Critical ", true},
{Color::Cyan, " User Tag ", true},
};
for (auto [color, name, bold] : indicators) {
add_str(w[Win::TimelineDesc], "__", color, bold);
add_str(w[Win::TimelineDesc], name);
}
}
void ConsoleUI::logMessage(ReplyMsgType type, const QString &msg) {
if (auto win = w[Win::Log]) {
Color color = Color::Default;
if (type == ReplyMsgType::Debug) {
color = Color::Debug;
} else if (type == ReplyMsgType::Warning) {
color = Color::Yellow;
} else if (type == ReplyMsgType::Critical) {
color = Color::Red;
}
add_str(win, qPrintable(msg + "\n"), color);
wrefresh(win);
}
}
void ConsoleUI::updateProgressBar(uint64_t cur, uint64_t total, bool success) {
werase(w[Win::DownloadBar]);
if (success && cur < total) {
const int width = 35;
const float progress = cur / (double)total;
const int pos = width * progress;
wprintw(w[Win::DownloadBar], "Downloading [%s>%s] %d%% %s", std::string(pos, '=').c_str(),
std::string(width - pos, ' ').c_str(), int(progress * 100.0), formattedDataSize(total).c_str());
}
wrefresh(w[Win::DownloadBar]);
}
void ConsoleUI::updateSummary() {
const auto &route = replay->route();
mvwprintw(w[Win::Stats], 0, 0, "Route: %s, %lu segments", qPrintable(route->name()), route->segments().size());
mvwprintw(w[Win::Stats], 1, 0, "Car Fingerprint: %s", replay->carFingerprint().c_str());
wrefresh(w[Win::Stats]);
}
void ConsoleUI::updateTimeline() {
auto win = w[Win::Timeline];
int width = getmaxx(win);
werase(win);
wattron(win, COLOR_PAIR(Color::Disengaged));
mvwhline(win, 1, 0, ' ', width);
mvwhline(win, 2, 0, ' ', width);
wattroff(win, COLOR_PAIR(Color::Disengaged));
const int total_sec = replay->totalSeconds();
for (auto [begin, end, type] : replay->getTimeline()) {
int start_pos = (begin / total_sec) * width;
int end_pos = (end / total_sec) * width;
if (type == TimelineType::Engaged) {
mvwchgat(win, 1, start_pos, end_pos - start_pos + 1, A_COLOR, Color::Engaged, NULL);
mvwchgat(win, 2, start_pos, end_pos - start_pos + 1, A_COLOR, Color::Engaged, NULL);
} else if (type == TimelineType::UserFlag) {
mvwchgat(win, 3, start_pos, end_pos - start_pos + 1, ACS_S3, Color::Cyan, NULL);
} else {
auto color_id = Color::Green;
if (type != TimelineType::AlertInfo) {
color_id = type == TimelineType::AlertWarning ? Color::Yellow : Color::Red;
}
mvwchgat(win, 3, start_pos, end_pos - start_pos + 1, ACS_S3, color_id, NULL);
}
}
int cur_pos = ((double)replay->currentSeconds() / total_sec) * width;
wattron(win, COLOR_PAIR(Color::BrightWhite));
mvwaddch(win, 0, cur_pos, ACS_VLINE);
mvwaddch(win, 3, cur_pos, ACS_VLINE);
wattroff(win, COLOR_PAIR(Color::BrightWhite));
wrefresh(win);
}
void ConsoleUI::readyRead() {
int c;
while ((c = getch()) != ERR) {
handleKey(c);
}
}
void ConsoleUI::pauseReplay(bool pause) {
replay->pause(pause);
status = pause ? Status::Paused : Status::Waiting;
}
void ConsoleUI::handleKey(char c) {
if (c == '\n') {
// pause the replay and switch getch() to blocking mode
pauseReplay(true);
updateStatus();
getch_timer.stop();
curs_set(true);
nodelay(stdscr, false);
// Wait for user input
rWarning("Waiting for input...");
int y = getmaxy(stdscr) - 9;
move(y, BORDER_SIZE);
add_str(stdscr, "Enter seek request: ", Color::BrightWhite, true);
refresh();
// Seek to choice
echo();
int choice = 0;
scanw((char *)"%d", &choice);
noecho();
pauseReplay(false);
replay->seekTo(choice, false);
// Clean up and turn off the blocking mode
move(y, 0);
clrtoeol();
nodelay(stdscr, true);
curs_set(false);
refresh();
getch_timer.start(1000, this);
} else if (c == '+' || c == '=') {
auto it = std::upper_bound(speed_array.begin(), speed_array.end(), replay->getSpeed());
if (it != speed_array.end()) {
rWarning("playback speed: %.1fx", *it);
replay->setSpeed(*it);
}
} else if (c == '_' || c == '-') {
auto it = std::lower_bound(speed_array.begin(), speed_array.end(), replay->getSpeed());
if (it != speed_array.begin()) {
auto prev = std::prev(it);
rWarning("playback speed: %.1fx", *prev);
replay->setSpeed(*prev);
}
} else if (c == 'e') {
replay->seekToFlag(FindFlag::nextEngagement);
} else if (c == 'd') {
replay->seekToFlag(FindFlag::nextDisEngagement);
} else if (c == 't') {
replay->seekToFlag(FindFlag::nextUserFlag);
} else if (c == 'i') {
replay->seekToFlag(FindFlag::nextInfo);
} else if (c == 'w') {
replay->seekToFlag(FindFlag::nextWarning);
} else if (c == 'c') {
replay->seekToFlag(FindFlag::nextCritical);
} else if (c == 'm') {
replay->seekTo(+60, true);
} else if (c == 'M') {
replay->seekTo(-60, true);
} else if (c == 's') {
replay->seekTo(+10, true);
} else if (c == 'S') {
replay->seekTo(-10, true);
} else if (c == ' ') {
pauseReplay(!replay->isPaused());
} else if (c == 'q' || c == 'Q') {
replay->stop();
qApp->exit();
}
}

51
tools/replay/consoleui.h Normal file

@@ -0,0 +1,51 @@
#pragma once
#include <array>
#include <QBasicTimer>
#include <QObject>
#include <QSocketNotifier>
#include <QTimer>
#include <QTimerEvent>
#include "tools/replay/replay.h"
#include <ncurses.h>
class ConsoleUI : public QObject {
Q_OBJECT
public:
ConsoleUI(Replay *replay, QObject *parent = 0);
~ConsoleUI();
inline static const std::array speed_array = {0.2f, 0.5f, 1.0f, 2.0f, 3.0f};
private:
void initWindows();
void handleKey(char c);
void displayHelp();
void displayTimelineDesc();
void updateTimeline();
void updateSummary();
void updateStatus();
void pauseReplay(bool pause);
enum Status { Waiting, Playing, Paused };
enum Win { Title, Stats, Log, LogBorder, DownloadBar, Timeline, TimelineDesc, Help, CarState, Max};
std::array<WINDOW*, Win::Max> w{};
SubMaster sm;
Replay *replay;
QBasicTimer getch_timer;
QTimer sm_timer;
QSocketNotifier notifier{0, QSocketNotifier::Read, this};
int max_width, max_height;
Status status = Status::Waiting;
signals:
void updateProgressBarSignal(uint64_t cur, uint64_t total, bool success);
void logMessageSignal(ReplyMsgType type, const QString &msg);
private slots:
void readyRead();
void timerEvent(QTimerEvent *ev);
void updateProgressBar(uint64_t cur, uint64_t total, bool success);
void logMessage(ReplyMsgType type, const QString &msg);
};

46
tools/replay/filereader.cc Normal file

@@ -0,0 +1,46 @@
#include "tools/replay/filereader.h"
#include <fstream>
#include "common/util.h"
#include "system/hardware/hw.h"
#include "tools/replay/util.h"
std::string cacheFilePath(const std::string &url) {
static std::string cache_path = [] {
const std::string comma_cache = Path::download_cache_root();
util::create_directories(comma_cache, 0755);
return comma_cache.back() == '/' ? comma_cache : comma_cache + "/";
}();
return cache_path + sha256(getUrlWithoutQuery(url));
}
std::string FileReader::read(const std::string &file, std::atomic<bool> *abort) {
const bool is_remote = file.find("https://") == 0;
const std::string local_file = is_remote ? cacheFilePath(file) : file;
std::string result;
if ((!is_remote || cache_to_local_) && util::file_exists(local_file)) {
result = util::read_file(local_file);
} else if (is_remote) {
result = download(file, abort);
if (cache_to_local_ && !result.empty()) {
std::ofstream fs(local_file, std::ios::binary | std::ios::out);
fs.write(result.data(), result.size());
}
}
return result;
}
std::string FileReader::download(const std::string &url, std::atomic<bool> *abort) {
for (int i = 0; i <= max_retries_ && !(abort && *abort); ++i) {
if (i > 0) rWarning("download failed, retrying %d", i);
std::string result = httpGet(url, chunk_size_, abort);
if (!result.empty()) {
return result;
}
}
return {};
}
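A short usage sketch (not part of the commit): with cache_to_local enabled, the first read() downloads the file over HTTP and writes it to cacheFilePath(url), so later reads hit the local copy. fetch_rlog is a hypothetical helper name.

// Hypothetical helper, for illustration only.
#include <atomic>
#include <string>
#include "tools/replay/filereader.h"

std::string fetch_rlog(const std::string &url, std::atomic<bool> *abort) {
  FileReader reader(/*cache_to_local=*/true, /*chunk_size=*/0, /*retries=*/3);
  return reader.read(url, abort);  // empty string on failure or abort
}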

20
tools/replay/filereader.h Normal file

@@ -0,0 +1,20 @@
#pragma once
#include <atomic>
#include <string>
class FileReader {
public:
FileReader(bool cache_to_local, size_t chunk_size = 0, int retries = 3)
: cache_to_local_(cache_to_local), chunk_size_(chunk_size), max_retries_(retries) {}
virtual ~FileReader() {}
std::string read(const std::string &file, std::atomic<bool> *abort = nullptr);
private:
std::string download(const std::string &url, std::atomic<bool> *abort);
size_t chunk_size_;
int max_retries_;
bool cache_to_local_;
};
std::string cacheFilePath(const std::string &url);

251
tools/replay/framereader.cc Normal file

@@ -0,0 +1,251 @@
#include "tools/replay/framereader.h"
#include "tools/replay/util.h"
#include <cassert>
#include <algorithm>
#include "third_party/libyuv/include/libyuv.h"
#ifdef __APPLE__
#define HW_DEVICE_TYPE AV_HWDEVICE_TYPE_VIDEOTOOLBOX
#define HW_PIX_FMT AV_PIX_FMT_VIDEOTOOLBOX
#else
#define HW_DEVICE_TYPE AV_HWDEVICE_TYPE_CUDA
#define HW_PIX_FMT AV_PIX_FMT_CUDA
#endif
namespace {
struct buffer_data {
const uint8_t *data;
int64_t offset;
size_t size;
};
int readPacket(void *opaque, uint8_t *buf, int buf_size) {
struct buffer_data *bd = (struct buffer_data *)opaque;
assert(bd->offset <= bd->size);
buf_size = std::min((size_t)buf_size, (size_t)(bd->size - bd->offset));
if (!buf_size) return AVERROR_EOF;
memcpy(buf, bd->data + bd->offset, buf_size);
bd->offset += buf_size;
return buf_size;
}
enum AVPixelFormat get_hw_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) {
enum AVPixelFormat *hw_pix_fmt = reinterpret_cast<enum AVPixelFormat *>(ctx->opaque);
for (const enum AVPixelFormat *p = pix_fmts; *p != -1; p++) {
if (*p == *hw_pix_fmt) return *p;
}
rWarning("Please run replay with the --no-hw-decoder flag!");
// fallback to YUV420p
*hw_pix_fmt = AV_PIX_FMT_NONE;
return AV_PIX_FMT_YUV420P;
}
} // namespace
FrameReader::FrameReader() {
av_log_set_level(AV_LOG_QUIET);
}
FrameReader::~FrameReader() {
for (AVPacket *pkt : packets) {
av_packet_free(&pkt);
}
if (decoder_ctx) avcodec_free_context(&decoder_ctx);
if (input_ctx) avformat_close_input(&input_ctx);
if (hw_device_ctx) av_buffer_unref(&hw_device_ctx);
if (avio_ctx_) {
av_freep(&avio_ctx_->buffer);
avio_context_free(&avio_ctx_);
}
}
bool FrameReader::load(const std::string &url, bool no_hw_decoder, std::atomic<bool> *abort, bool local_cache, int chunk_size, int retries) {
FileReader f(local_cache, chunk_size, retries);
std::string data = f.read(url, abort);
if (data.empty()) {
rWarning("URL %s returned no data", url.c_str());
return false;
}
return load((std::byte *)data.data(), data.size(), no_hw_decoder, abort);
}
bool FrameReader::load(const std::byte *data, size_t size, bool no_hw_decoder, std::atomic<bool> *abort) {
input_ctx = avformat_alloc_context();
if (!input_ctx) {
rError("Error calling avformat_alloc_context");
return false;
}
struct buffer_data bd = {
.data = (const uint8_t*)data,
.offset = 0,
.size = size,
};
const int avio_ctx_buffer_size = 64 * 1024;
unsigned char *avio_ctx_buffer = (unsigned char *)av_malloc(avio_ctx_buffer_size);
avio_ctx_ = avio_alloc_context(avio_ctx_buffer, avio_ctx_buffer_size, 0, &bd, readPacket, nullptr, nullptr);
input_ctx->pb = avio_ctx_;
input_ctx->probesize = 10 * 1024 * 1024; // 10MB
int ret = avformat_open_input(&input_ctx, nullptr, nullptr, nullptr);
if (ret != 0) {
char err_str[1024] = {0};
av_strerror(ret, err_str, std::size(err_str));
rError("Error loading video - %s", err_str);
return false;
}
ret = avformat_find_stream_info(input_ctx, nullptr);
if (ret < 0) {
rError("cannot find a video stream in the input file");
return false;
}
AVStream *video = input_ctx->streams[0];
const AVCodec *decoder = avcodec_find_decoder(video->codecpar->codec_id);
if (!decoder) return false;
decoder_ctx = avcodec_alloc_context3(decoder);
ret = avcodec_parameters_to_context(decoder_ctx, video->codecpar);
if (ret != 0) return false;
width = (decoder_ctx->width + 3) & ~3;
height = decoder_ctx->height;
if (has_hw_decoder && !no_hw_decoder) {
if (!initHardwareDecoder(HW_DEVICE_TYPE)) {
rWarning("No device with hardware decoder found. fallback to CPU decoding.");
}
}
ret = avcodec_open2(decoder_ctx, decoder, nullptr);
if (ret < 0) {
rError("avcodec_open2 failed %d", ret);
return false;
}
packets.reserve(60 * 20); // 20fps, one minute
while (!(abort && *abort)) {
AVPacket *pkt = av_packet_alloc();
ret = av_read_frame(input_ctx, pkt);
if (ret < 0) {
av_packet_free(&pkt);
valid_ = (ret == AVERROR_EOF);
break;
}
packets.push_back(pkt);
// some streams seem to contain no keyframes
key_frames_count_ += pkt->flags & AV_PKT_FLAG_KEY;
}
valid_ = valid_ && !packets.empty();
return valid_;
}
bool FrameReader::initHardwareDecoder(AVHWDeviceType hw_device_type) {
for (int i = 0;; i++) {
const AVCodecHWConfig *config = avcodec_get_hw_config(decoder_ctx->codec, i);
if (!config) {
rWarning("decoder %s does not support hw device type %s.", decoder_ctx->codec->name,
av_hwdevice_get_type_name(hw_device_type));
return false;
}
if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX && config->device_type == hw_device_type) {
hw_pix_fmt = config->pix_fmt;
break;
}
}
int ret = av_hwdevice_ctx_create(&hw_device_ctx, hw_device_type, nullptr, nullptr, 0);
if (ret < 0) {
hw_pix_fmt = AV_PIX_FMT_NONE;
has_hw_decoder = false;
rWarning("Failed to create specified HW device %d.", ret);
return false;
}
decoder_ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
decoder_ctx->opaque = &hw_pix_fmt;
decoder_ctx->get_format = get_hw_format;
return true;
}
bool FrameReader::get(int idx, VisionBuf *buf) {
assert(buf != nullptr);
if (!valid_ || idx < 0 || idx >= packets.size()) {
return false;
}
return decode(idx, buf);
}
bool FrameReader::decode(int idx, VisionBuf *buf) {
int from_idx = idx;
if (idx != prev_idx + 1 && key_frames_count_ > 1) {
// seeking to the nearest key frame
for (int i = idx; i >= 0; --i) {
if (packets[i]->flags & AV_PKT_FLAG_KEY) {
from_idx = i;
break;
}
}
}
prev_idx = idx;
for (int i = from_idx; i <= idx; ++i) {
AVFrame *f = decodeFrame(packets[i]);
if (f && i == idx) {
return copyBuffers(f, buf);
}
}
return false;
}
AVFrame *FrameReader::decodeFrame(AVPacket *pkt) {
int ret = avcodec_send_packet(decoder_ctx, pkt);
if (ret < 0) {
rError("Error sending a packet for decoding: %d", ret);
return nullptr;
}
av_frame_.reset(av_frame_alloc());
ret = avcodec_receive_frame(decoder_ctx, av_frame_.get());
if (ret != 0) {
rError("avcodec_receive_frame error: %d", ret);
return nullptr;
}
if (av_frame_->format == hw_pix_fmt) {
hw_frame.reset(av_frame_alloc());
if ((ret = av_hwframe_transfer_data(hw_frame.get(), av_frame_.get(), 0)) < 0) {
rError("error transferring the data from GPU to CPU");
return nullptr;
}
return hw_frame.get();
} else {
return av_frame_.get();
}
}
bool FrameReader::copyBuffers(AVFrame *f, VisionBuf *buf) {
assert(f != nullptr && buf != nullptr);
if (hw_pix_fmt == HW_PIX_FMT) {
for (int i = 0; i < height/2; i++) {
memcpy(buf->y + (i*2 + 0)*buf->stride, f->data[0] + (i*2 + 0)*f->linesize[0], width);
memcpy(buf->y + (i*2 + 1)*buf->stride, f->data[0] + (i*2 + 1)*f->linesize[0], width);
memcpy(buf->uv + i*buf->stride, f->data[1] + i*f->linesize[1], width);
}
} else {
libyuv::I420ToNV12(f->data[0], f->linesize[0],
f->data[1], f->linesize[1],
f->data[2], f->linesize[2],
buf->y, buf->stride,
buf->uv, buf->stride,
width, height);
}
return true;
}
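A short usage sketch (not part of the commit): load an .hevc segment with the software decoder and report its geometry. probe_video is a hypothetical helper name.

// Hypothetical helper, for illustration only.
#include <cstdio>
#include <string>
#include "tools/replay/framereader.h"

bool probe_video(const std::string &url) {
  FrameReader fr;
  if (!fr.load(url, /*no_hw_decoder=*/true)) return false;
  printf("%dx%d, %zu frames, %d bytes per NV12 frame\n",
         fr.width, fr.height, fr.getFrameCount(), fr.getYUVSize());
  return fr.valid();
}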

51
tools/replay/framereader.h Normal file

@@ -0,0 +1,51 @@
#pragma once
#include <memory>
#include <string>
#include <vector>
#include "cereal/visionipc/visionbuf.h"
#include "tools/replay/filereader.h"
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}
struct AVFrameDeleter {
void operator()(AVFrame* frame) const { av_frame_free(&frame); }
};
class FrameReader {
public:
FrameReader();
~FrameReader();
bool load(const std::string &url, bool no_hw_decoder = false, std::atomic<bool> *abort = nullptr, bool local_cache = false,
int chunk_size = -1, int retries = 0);
bool load(const std::byte *data, size_t size, bool no_hw_decoder = false, std::atomic<bool> *abort = nullptr);
bool get(int idx, VisionBuf *buf);
int getYUVSize() const { return width * height * 3 / 2; }
size_t getFrameCount() const { return packets.size(); }
bool valid() const { return valid_; }
int width = 0, height = 0;
private:
bool initHardwareDecoder(AVHWDeviceType hw_device_type);
bool decode(int idx, VisionBuf *buf);
AVFrame * decodeFrame(AVPacket *pkt);
bool copyBuffers(AVFrame *f, VisionBuf *buf);
std::vector<AVPacket*> packets;
std::unique_ptr<AVFrame, AVFrameDeleter> av_frame_, hw_frame;
AVFormatContext *input_ctx = nullptr;
AVCodecContext *decoder_ctx = nullptr;
int key_frames_count_ = 0;
bool valid_ = false;
AVIOContext *avio_ctx_ = nullptr;
AVPixelFormat hw_pix_fmt = AV_PIX_FMT_NONE;
AVBufferRef *hw_device_ctx = nullptr;
int prev_idx = -1;
inline static std::atomic<bool> has_hw_decoder = true;
};

98
tools/replay/logreader.cc Normal file

@@ -0,0 +1,98 @@
#include "tools/replay/logreader.h"
#include <algorithm>
#include "tools/replay/filereader.h"
#include "tools/replay/util.h"
Event::Event(const kj::ArrayPtr<const capnp::word> &amsg, bool frame) : reader(amsg), frame(frame) {
words = kj::ArrayPtr<const capnp::word>(amsg.begin(), reader.getEnd());
event = reader.getRoot<cereal::Event>();
which = event.which();
mono_time = event.getLogMonoTime();
// 1) Send video data at t=timestampEof/timestampSof
// 2) Send encodeIndex packet at t=logMonoTime
if (frame) {
auto idx = capnp::AnyStruct::Reader(event).getPointerSection()[0].getAs<cereal::EncodeIndex>();
// C2 only has eof set, and some older routes have neither
uint64_t sof = idx.getTimestampSof();
uint64_t eof = idx.getTimestampEof();
if (sof > 0) {
mono_time = sof;
} else if (eof > 0) {
mono_time = eof;
}
}
}
// class LogReader
LogReader::LogReader(size_t memory_pool_block_size) {
#ifdef HAS_MEMORY_RESOURCE
const size_t buf_size = sizeof(Event) * memory_pool_block_size;
mbr_ = std::make_unique<std::pmr::monotonic_buffer_resource>(buf_size);
#endif
events.reserve(memory_pool_block_size);
}
LogReader::~LogReader() {
for (Event *e : events) {
delete e;
}
}
bool LogReader::load(const std::string &url, std::atomic<bool> *abort, bool local_cache, int chunk_size, int retries) {
raw_ = FileReader(local_cache, chunk_size, retries).read(url, abort);
if (raw_.empty()) return false;
if (url.find(".bz2") != std::string::npos) {
raw_ = decompressBZ2(raw_, abort);
if (raw_.empty()) return false;
}
return parse(abort);
}
bool LogReader::load(const std::byte *data, size_t size, std::atomic<bool> *abort) {
raw_.assign((const char *)data, size);
return parse(abort);
}
bool LogReader::parse(std::atomic<bool> *abort) {
try {
kj::ArrayPtr<const capnp::word> words((const capnp::word *)raw_.data(), raw_.size() / sizeof(capnp::word));
while (words.size() > 0 && !(abort && *abort)) {
#ifdef HAS_MEMORY_RESOURCE
Event *evt = new (mbr_.get()) Event(words);
#else
Event *evt = new Event(words);
#endif
// Add encodeIdx packet again as a frame packet for the video stream
if (evt->which == cereal::Event::ROAD_ENCODE_IDX ||
evt->which == cereal::Event::DRIVER_ENCODE_IDX ||
evt->which == cereal::Event::WIDE_ROAD_ENCODE_IDX) {
#ifdef HAS_MEMORY_RESOURCE
Event *frame_evt = new (mbr_.get()) Event(words, true);
#else
Event *frame_evt = new Event(words, true);
#endif
events.push_back(frame_evt);
}
words = kj::arrayPtr(evt->reader.getEnd(), words.end());
events.push_back(evt);
}
} catch (const kj::Exception &e) {
rWarning("failed to parse log : %s", e.getDescription().cStr());
if (!events.empty()) {
rWarning("read %zu events from corrupt log", events.size());
}
}
if (!events.empty() && !(abort && *abort)) {
std::sort(events.begin(), events.end(), Event::lessThan());
return true;
}
return false;
}
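A short usage sketch (not part of the commit): load a log (local path or https URL) and count controlsState events; events are sorted by mono_time once parse() succeeds. count_controls_state is a hypothetical helper name.

// Hypothetical helper, for illustration only.
#include <string>
#include "tools/replay/logreader.h"

size_t count_controls_state(const std::string &path_or_url) {
  LogReader log;
  if (!log.load(path_or_url)) return 0;
  size_t n = 0;
  for (const Event *e : log.events) {
    if (e->which == cereal::Event::Which::CONTROLS_STATE) ++n;
  }
  return n;
}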

67
tools/replay/logreader.h Normal file

@@ -0,0 +1,67 @@
#pragma once
#if __has_include(<memory_resource>)
#define HAS_MEMORY_RESOURCE 1
#include <memory_resource>
#endif
#include <memory>
#include <string>
#include <vector>
#include "cereal/gen/cpp/log.capnp.h"
#include "system/camerad/cameras/camera_common.h"
const CameraType ALL_CAMERAS[] = {RoadCam, DriverCam, WideRoadCam};
const int MAX_CAMERAS = std::size(ALL_CAMERAS);
const int DEFAULT_EVENT_MEMORY_POOL_BLOCK_SIZE = 65000;
class Event {
public:
Event(cereal::Event::Which which, uint64_t mono_time) : reader(kj::ArrayPtr<capnp::word>{}) {
// construct a dummy Event for binary search, e.g. std::upper_bound
this->which = which;
this->mono_time = mono_time;
}
Event(const kj::ArrayPtr<const capnp::word> &amsg, bool frame = false);
inline kj::ArrayPtr<const capnp::byte> bytes() const { return words.asBytes(); }
struct lessThan {
inline bool operator()(const Event *l, const Event *r) {
return l->mono_time < r->mono_time || (l->mono_time == r->mono_time && l->which < r->which);
}
};
#if HAS_MEMORY_RESOURCE
void *operator new(size_t size, std::pmr::monotonic_buffer_resource *mbr) {
return mbr->allocate(size);
}
void operator delete(void *ptr) {
// No-op. memory used by EventMemoryPool increases monotonically until the logReader is destroyed.
}
#endif
uint64_t mono_time;
cereal::Event::Which which;
cereal::Event::Reader event;
capnp::FlatArrayMessageReader reader;
kj::ArrayPtr<const capnp::word> words;
bool frame;
};
class LogReader {
public:
LogReader(size_t memory_pool_block_size = DEFAULT_EVENT_MEMORY_POOL_BLOCK_SIZE);
~LogReader();
bool load(const std::string &url, std::atomic<bool> *abort = nullptr,
bool local_cache = false, int chunk_size = -1, int retries = 0);
bool load(const std::byte *data, size_t size, std::atomic<bool> *abort = nullptr);
std::vector<Event*> events;
private:
bool parse(std::atomic<bool> *abort);
std::string raw_;
#ifdef HAS_MEMORY_RESOURCE
std::unique_ptr<std::pmr::monotonic_buffer_resource> mbr_;
#endif
};

83
tools/replay/main.cc Normal file

@@ -0,0 +1,83 @@
#include <QApplication>
#include <QCommandLineParser>
#include "common/prefix.h"
#include "tools/replay/consoleui.h"
#include "tools/replay/replay.h"
int main(int argc, char *argv[]) {
#ifdef __APPLE__
// With all sockets opened, we might hit the default limit of 256 on macOS
util::set_file_descriptor_limit(1024);
#endif
QCoreApplication app(argc, argv);
const std::tuple<QString, REPLAY_FLAGS, QString> flags[] = {
{"dcam", REPLAY_FLAG_DCAM, "load driver camera"},
{"ecam", REPLAY_FLAG_ECAM, "load wide road camera"},
{"no-loop", REPLAY_FLAG_NO_LOOP, "stop at the end of the route"},
{"no-cache", REPLAY_FLAG_NO_FILE_CACHE, "turn off local cache"},
{"qcam", REPLAY_FLAG_QCAMERA, "load qcamera"},
{"no-hw-decoder", REPLAY_FLAG_NO_HW_DECODER, "disable HW video decoding"},
{"no-vipc", REPLAY_FLAG_NO_VIPC, "do not output video"},
{"all", REPLAY_FLAG_ALL_SERVICES, "do output all messages including uiDebug, userFlag"
". this may causes issues when used along with UI"}
};
QCommandLineParser parser;
parser.setApplicationDescription("Mock openpilot components by publishing logged messages.");
parser.addHelpOption();
parser.addPositionalArgument("route", "the drive to replay. find your drives at connect.comma.ai");
parser.addOption({{"a", "allow"}, "whitelist of services to send", "allow"});
parser.addOption({{"b", "block"}, "blacklist of services to send", "block"});
parser.addOption({{"c", "cache"}, "cache <n> segments in memory. default is 5", "n"});
parser.addOption({{"s", "start"}, "start from <seconds>", "seconds"});
parser.addOption({"x", QString("playback <speed>. between %1 - %2")
.arg(ConsoleUI::speed_array.front()).arg(ConsoleUI::speed_array.back()), "speed"});
parser.addOption({"demo", "use a demo route instead of providing your own"});
parser.addOption({"data_dir", "local directory with routes", "data_dir"});
parser.addOption({"prefix", "set OPENPILOT_PREFIX", "prefix"});
for (auto &[name, _, desc] : flags) {
parser.addOption({name, desc});
}
parser.process(app);
const QStringList args = parser.positionalArguments();
if (args.empty() && !parser.isSet("demo")) {
parser.showHelp();
}
const QString route = args.empty() ? DEMO_ROUTE : args.first();
QStringList allow = parser.value("allow").isEmpty() ? QStringList{} : parser.value("allow").split(",");
QStringList block = parser.value("block").isEmpty() ? QStringList{} : parser.value("block").split(",");
uint32_t replay_flags = REPLAY_FLAG_NONE;
for (const auto &[name, flag, _] : flags) {
if (parser.isSet(name)) {
replay_flags |= flag;
}
}
std::unique_ptr<OpenpilotPrefix> op_prefix;
auto prefix = parser.value("prefix");
if (!prefix.isEmpty()) {
op_prefix.reset(new OpenpilotPrefix(prefix.toStdString()));
}
Replay *replay = new Replay(route, allow, block, nullptr, replay_flags, parser.value("data_dir"), &app);
if (!parser.value("c").isEmpty()) {
replay->setSegmentCacheLimit(parser.value("c").toInt());
}
if (!parser.value("x").isEmpty()) {
replay->setSpeed(std::clamp(parser.value("x").toFloat(),
ConsoleUI::speed_array.front(), ConsoleUI::speed_array.back()));
}
if (!replay->load()) {
return 0;
}
ConsoleUI console_ui(replay);
replay->start(parser.value("start").toInt());
return app.exec();
}
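Replay can also be driven without the curses UI, e.g. from another Qt tool or a test. A minimal sketch (not part of the commit), assuming the usual Qt event loop; run_headless is a hypothetical name.

// Hypothetical helper, for illustration only.
#include <QCoreApplication>
#include "tools/replay/replay.h"

int run_headless(int argc, char *argv[]) {
  QCoreApplication app(argc, argv);
  Replay replay(DEMO_ROUTE, {"carState", "controlsState"}, {}, nullptr,
                REPLAY_FLAG_NO_VIPC, "", &app);
  if (!replay.load()) return 1;
  replay.start(60);  // begin publishing from 60 seconds into the route
  return app.exec();
}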

428
tools/replay/replay.cc Normal file

@@ -0,0 +1,428 @@
#include "tools/replay/replay.h"
#include <QDebug>
#include <QtConcurrent>
#include <capnp/dynamic.h>
#include "cereal/services.h"
#include "common/params.h"
#include "common/timing.h"
#include "tools/replay/util.h"
Replay::Replay(QString route, QStringList allow, QStringList block, SubMaster *sm_,
uint32_t flags, QString data_dir, QObject *parent) : sm(sm_), flags_(flags), QObject(parent) {
if (!(flags_ & REPLAY_FLAG_ALL_SERVICES)) {
block << "uiDebug" << "userFlag";
}
auto event_struct = capnp::Schema::from<cereal::Event>().asStruct();
sockets_.resize(event_struct.getUnionFields().size());
for (const auto &[name, _] : services) {
if (!block.contains(name.c_str()) && (allow.empty() || allow.contains(name.c_str()))) {
uint16_t which = event_struct.getFieldByName(name).getProto().getDiscriminantValue();
sockets_[which] = name.c_str();
}
}
std::vector<const char *> s;
std::copy_if(sockets_.begin(), sockets_.end(), std::back_inserter(s),
[](const char *name) { return name != nullptr; });
qDebug() << "services " << s;
qDebug() << "loading route " << route;
if (sm == nullptr) {
pm = std::make_unique<PubMaster>(s);
}
route_ = std::make_unique<Route>(route, data_dir);
events_ = std::make_unique<std::vector<Event *>>();
new_events_ = std::make_unique<std::vector<Event *>>();
}
Replay::~Replay() {
stop();
}
void Replay::stop() {
if (!stream_thread_ && segments_.empty()) return;
rInfo("shutdown: in progress...");
if (stream_thread_ != nullptr) {
exit_ = updating_events_ = true;
stream_cv_.notify_one();
stream_thread_->quit();
stream_thread_->wait();
stream_thread_ = nullptr;
}
camera_server_.reset(nullptr);
timeline_future.waitForFinished();
segments_.clear();
rInfo("shutdown: done");
}
bool Replay::load() {
if (!route_->load()) {
qCritical() << "failed to load route" << route_->name()
<< "from" << (route_->dir().isEmpty() ? "server" : route_->dir());
return false;
}
for (auto &[n, f] : route_->segments()) {
bool has_log = !f.rlog.isEmpty() || !f.qlog.isEmpty();
bool has_video = !f.road_cam.isEmpty() || !f.qcamera.isEmpty();
if (has_log && (has_video || hasFlag(REPLAY_FLAG_NO_VIPC))) {
segments_.insert({n, nullptr});
}
}
if (segments_.empty()) {
qCritical() << "no valid segments in route" << route_->name();
return false;
}
rInfo("load route %s with %zu valid segments", qPrintable(route_->name()), segments_.size());
return true;
}
void Replay::start(int seconds) {
seekTo(route_->identifier().segment_id * 60 + seconds, false);
}
void Replay::updateEvents(const std::function<bool()> &lambda) {
// set updating_events to true to force the stream thread to release the lock and wait for events_updated.
updating_events_ = true;
{
std::unique_lock lk(stream_lock_);
events_updated_ = lambda();
updating_events_ = false;
}
stream_cv_.notify_one();
}
void Replay::seekTo(double seconds, bool relative) {
seconds = relative ? seconds + currentSeconds() : seconds;
updateEvents([&]() {
seconds = std::max(double(0.0), seconds);
int seg = (int)seconds / 60;
if (segments_.find(seg) == segments_.end()) {
rWarning("can't seek to %d s segment %d is invalid", seconds, seg);
return true;
}
rInfo("seeking to %d s, segment %d", (int)seconds, seg);
current_segment_ = seg;
cur_mono_time_ = route_start_ts_ + seconds * 1e9;
emit seekedTo(seconds);
return isSegmentMerged(seg);
});
queueSegment();
}
void Replay::seekToFlag(FindFlag flag) {
if (auto next = find(flag)) {
seekTo(*next - 2, false); // seek to 2 seconds before next
}
}
void Replay::buildTimeline() {
uint64_t engaged_begin = 0;
bool engaged = false;
auto alert_status = cereal::ControlsState::AlertStatus::NORMAL;
auto alert_size = cereal::ControlsState::AlertSize::NONE;
uint64_t alert_begin = 0;
std::string alert_type;
const TimelineType timeline_types[] = {
[(int)cereal::ControlsState::AlertStatus::NORMAL] = TimelineType::AlertInfo,
[(int)cereal::ControlsState::AlertStatus::USER_PROMPT] = TimelineType::AlertWarning,
[(int)cereal::ControlsState::AlertStatus::CRITICAL] = TimelineType::AlertCritical,
};
const auto &route_segments = route_->segments();
for (auto it = route_segments.cbegin(); it != route_segments.cend() && !exit_; ++it) {
std::shared_ptr<LogReader> log(new LogReader());
if (!log->load(it->second.qlog.toStdString(), &exit_, !hasFlag(REPLAY_FLAG_NO_FILE_CACHE), 0, 3)) continue;
for (const Event *e : log->events) {
if (e->which == cereal::Event::Which::CONTROLS_STATE) {
auto cs = e->event.getControlsState();
if (engaged != cs.getEnabled()) {
if (engaged) {
std::lock_guard lk(timeline_lock);
timeline.push_back({toSeconds(engaged_begin), toSeconds(e->mono_time), TimelineType::Engaged});
}
engaged_begin = e->mono_time;
engaged = cs.getEnabled();
}
if (alert_type != cs.getAlertType().cStr() || alert_status != cs.getAlertStatus()) {
if (!alert_type.empty() && alert_size != cereal::ControlsState::AlertSize::NONE) {
std::lock_guard lk(timeline_lock);
timeline.push_back({toSeconds(alert_begin), toSeconds(e->mono_time), timeline_types[(int)alert_status]});
}
alert_begin = e->mono_time;
alert_type = cs.getAlertType().cStr();
alert_size = cs.getAlertSize();
alert_status = cs.getAlertStatus();
}
} else if (e->which == cereal::Event::Which::USER_FLAG) {
std::lock_guard lk(timeline_lock);
timeline.push_back({toSeconds(e->mono_time), toSeconds(e->mono_time), TimelineType::UserFlag});
}
}
std::sort(timeline.begin(), timeline.end(), [](auto &l, auto &r) { return std::get<2>(l) < std::get<2>(r); });
emit qLogLoaded(it->first, log);
}
}
std::optional<uint64_t> Replay::find(FindFlag flag) {
int cur_ts = currentSeconds();
for (auto [start_ts, end_ts, type] : getTimeline()) {
if (type == TimelineType::Engaged) {
if (flag == FindFlag::nextEngagement && start_ts > cur_ts) {
return start_ts;
} else if (flag == FindFlag::nextDisEngagement && end_ts > cur_ts) {
return end_ts;
}
} else if (start_ts > cur_ts) {
if ((flag == FindFlag::nextUserFlag && type == TimelineType::UserFlag) ||
(flag == FindFlag::nextInfo && type == TimelineType::AlertInfo) ||
(flag == FindFlag::nextWarning && type == TimelineType::AlertWarning) ||
(flag == FindFlag::nextCritical && type == TimelineType::AlertCritical)) {
return start_ts;
}
}
}
return std::nullopt;
}
void Replay::pause(bool pause) {
updateEvents([=]() {
rWarning("%s at %.2f s", pause ? "paused..." : "resuming", currentSeconds());
paused_ = pause;
return true;
});
}
void Replay::setCurrentSegment(int n) {
if (current_segment_.exchange(n) != n) {
QMetaObject::invokeMethod(this, &Replay::queueSegment, Qt::QueuedConnection);
}
}
void Replay::segmentLoadFinished(bool success) {
if (!success) {
Segment *seg = qobject_cast<Segment *>(sender());
rWarning("failed to load segment %d, removing it from current replay list", seg->seg_num);
updateEvents([&]() {
segments_.erase(seg->seg_num);
return true;
});
}
queueSegment();
}
void Replay::queueSegment() {
auto cur = segments_.lower_bound(current_segment_.load());
if (cur == segments_.end()) return;
auto begin = std::prev(cur, std::min<int>(segment_cache_limit / 2, std::distance(segments_.begin(), cur)));
auto end = std::next(begin, std::min<int>(segment_cache_limit, segments_.size()));
// load one segment at a time
auto it = std::find_if(cur, end, [](auto &it) { return !it.second || !it.second->isLoaded(); });
if (it != end && !it->second) {
rDebug("loading segment %d...", it->first);
it->second = std::make_unique<Segment>(it->first, route_->at(it->first), flags_);
QObject::connect(it->second.get(), &Segment::loadFinished, this, &Replay::segmentLoadFinished);
}
mergeSegments(begin, end);
// free segments outside the current segment window.
std::for_each(segments_.begin(), begin, [](auto &e) { e.second.reset(nullptr); });
std::for_each(end, segments_.end(), [](auto &e) { e.second.reset(nullptr); });
// start stream thread
const auto &cur_segment = cur->second;
if (stream_thread_ == nullptr && cur_segment->isLoaded()) {
startStream(cur_segment.get());
emit streamStarted();
}
}
void Replay::mergeSegments(const SegmentMap::iterator &begin, const SegmentMap::iterator &end) {
std::vector<int> segments_need_merge;
size_t new_events_size = 0;
for (auto it = begin; it != end; ++it) {
if (it->second && it->second->isLoaded()) {
segments_need_merge.push_back(it->first);
new_events_size += it->second->log->events.size();
}
}
if (segments_need_merge != segments_merged_) {
std::string s;
for (int i = 0; i < segments_need_merge.size(); ++i) {
s += std::to_string(segments_need_merge[i]);
if (i != segments_need_merge.size() - 1) s += ", ";
}
rDebug("merge segments %s", s.c_str());
new_events_->clear();
new_events_->reserve(new_events_size);
for (int n : segments_need_merge) {
size_t size = new_events_->size();
const auto &events = segments_[n]->log->events;
std::copy_if(events.begin(), events.end(), std::back_inserter(*new_events_),
[this](auto e) { return e->which < sockets_.size() && sockets_[e->which] != nullptr; });
std::inplace_merge(new_events_->begin(), new_events_->begin() + size, new_events_->end(), Event::lessThan());
}
if (stream_thread_) {
emit segmentsMerged();
}
updateEvents([&]() {
events_.swap(new_events_);
segments_merged_ = segments_need_merge;
// Do not wake up the stream thread if the current segment has not been merged.
return isSegmentMerged(current_segment_) || (segments_.count(current_segment_) == 0);
});
}
}
void Replay::startStream(const Segment *cur_segment) {
const auto &events = cur_segment->log->events;
// each segment has an INIT_DATA
route_start_ts_ = events.front()->mono_time;
cur_mono_time_ += route_start_ts_ - 1;
// write CarParams
auto it = std::find_if(events.begin(), events.end(), [](auto e) { return e->which == cereal::Event::Which::CAR_PARAMS; });
if (it != events.end()) {
car_fingerprint_ = (*it)->event.getCarParams().getCarFingerprint();
capnp::MallocMessageBuilder builder;
builder.setRoot((*it)->event.getCarParams());
auto words = capnp::messageToFlatArray(builder);
auto bytes = words.asBytes();
Params().put("CarParams", (const char *)bytes.begin(), bytes.size());
Params().put("CarParamsPersistent", (const char *)bytes.begin(), bytes.size());
} else {
rWarning("failed to read CarParams from current segment");
}
// start camera server
if (!hasFlag(REPLAY_FLAG_NO_VIPC)) {
std::pair<int, int> camera_size[MAX_CAMERAS] = {};
for (auto type : ALL_CAMERAS) {
if (auto &fr = cur_segment->frames[type]) {
camera_size[type] = {fr->width, fr->height};
}
}
camera_server_ = std::make_unique<CameraServer>(camera_size);
}
emit segmentsMerged();
// start stream thread
stream_thread_ = new QThread();
QObject::connect(stream_thread_, &QThread::started, [=]() { stream(); });
QObject::connect(stream_thread_, &QThread::finished, stream_thread_, &QThread::deleteLater);
stream_thread_->start();
timeline_future = QtConcurrent::run(this, &Replay::buildTimeline);
}
void Replay::publishMessage(const Event *e) {
if (event_filter && event_filter(e, filter_opaque)) return;
if (sm == nullptr) {
auto bytes = e->bytes();
int ret = pm->send(sockets_[e->which], (capnp::byte *)bytes.begin(), bytes.size());
if (ret == -1) {
rWarning("stop publishing %s due to multiple publishers error", sockets_[e->which]);
sockets_[e->which] = nullptr;
}
} else {
sm->update_msgs(nanos_since_boot(), {{sockets_[e->which], e->event}});
}
}
void Replay::publishFrame(const Event *e) {
static const std::map<cereal::Event::Which, CameraType> cam_types{
{cereal::Event::ROAD_ENCODE_IDX, RoadCam},
{cereal::Event::DRIVER_ENCODE_IDX, DriverCam},
{cereal::Event::WIDE_ROAD_ENCODE_IDX, WideRoadCam},
};
if ((e->which == cereal::Event::DRIVER_ENCODE_IDX && !hasFlag(REPLAY_FLAG_DCAM)) ||
(e->which == cereal::Event::WIDE_ROAD_ENCODE_IDX && !hasFlag(REPLAY_FLAG_ECAM))) {
return;
}
auto eidx = capnp::AnyStruct::Reader(e->event).getPointerSection()[0].getAs<cereal::EncodeIndex>();
if (eidx.getType() == cereal::EncodeIndex::Type::FULL_H_E_V_C && isSegmentMerged(eidx.getSegmentNum())) {
CameraType cam = cam_types.at(e->which);
camera_server_->pushFrame(cam, segments_[eidx.getSegmentNum()]->frames[cam].get(), eidx);
}
}
void Replay::stream() {
cereal::Event::Which cur_which = cereal::Event::Which::INIT_DATA;
double prev_replay_speed = speed_;
std::unique_lock lk(stream_lock_);
while (true) {
stream_cv_.wait(lk, [=]() { return exit_ || (events_updated_ && !paused_); });
events_updated_ = false;
if (exit_) break;
Event cur_event(cur_which, cur_mono_time_);
auto eit = std::upper_bound(events_->begin(), events_->end(), &cur_event, Event::lessThan());
if (eit == events_->end()) {
rInfo("waiting for events...");
continue;
}
uint64_t evt_start_ts = cur_mono_time_;
uint64_t loop_start_ts = nanos_since_boot();
for (auto end = events_->end(); !updating_events_ && eit != end; ++eit) {
const Event *evt = (*eit);
cur_which = evt->which;
cur_mono_time_ = evt->mono_time;
setCurrentSegment(toSeconds(cur_mono_time_) / 60);
if (sockets_[cur_which] != nullptr) {
// keep time
long etime = (cur_mono_time_ - evt_start_ts) / speed_;
long rtime = nanos_since_boot() - loop_start_ts;
long behind_ns = etime - rtime;
// if behind_ns is greater than 1 second, an invalid segment was skipped by seeking/replaying
if (behind_ns >= 1 * 1e9 || speed_ != prev_replay_speed) {
// reset event start times
evt_start_ts = cur_mono_time_;
loop_start_ts = nanos_since_boot();
prev_replay_speed = speed_;
} else if (behind_ns > 0) {
precise_nano_sleep(behind_ns);
}
if (!evt->frame) {
publishMessage(evt);
} else if (camera_server_) {
if (speed_ > 1.0) {
camera_server_->waitForSent();
}
publishFrame(evt);
}
}
}
// wait for the frame to be sent before unlocking (the FrameReader may be deleted after unlock)
if (camera_server_) {
camera_server_->waitForSent();
}
if (eit == events_->end() && !hasFlag(REPLAY_FLAG_NO_LOOP)) {
int last_segment = segments_.empty() ? 0 : segments_.rbegin()->first;
if (current_segment_ >= last_segment && isSegmentMerged(last_segment)) {
rInfo("reaches the end of route, restart from beginning");
QMetaObject::invokeMethod(this, std::bind(&Replay::seekTo, this, 0, false), Qt::QueuedConnection);
}
}
}
}

146
tools/replay/replay.h Normal file

@@ -0,0 +1,146 @@
#pragma once
#include <algorithm>
#include <map>
#include <memory>
#include <optional>
#include <string>
#include <tuple>
#include <vector>
#include <utility>
#include <QThread>
#include "tools/replay/camera.h"
#include "tools/replay/route.h"
const QString DEMO_ROUTE = "a2a0ccea32023010|2023-07-27--13-01-19";
// one segment uses about 100M of memory
constexpr int MIN_SEGMENTS_CACHE = 5;
enum REPLAY_FLAGS {
REPLAY_FLAG_NONE = 0x0000,
REPLAY_FLAG_DCAM = 0x0002,
REPLAY_FLAG_ECAM = 0x0004,
REPLAY_FLAG_NO_LOOP = 0x0010,
REPLAY_FLAG_NO_FILE_CACHE = 0x0020,
REPLAY_FLAG_QCAMERA = 0x0040,
REPLAY_FLAG_NO_HW_DECODER = 0x0100,
REPLAY_FLAG_NO_VIPC = 0x0400,
REPLAY_FLAG_ALL_SERVICES = 0x0800,
};
enum class FindFlag {
nextEngagement,
nextDisEngagement,
nextUserFlag,
nextInfo,
nextWarning,
nextCritical
};
enum class TimelineType { None, Engaged, AlertInfo, AlertWarning, AlertCritical, UserFlag };
typedef bool (*replayEventFilter)(const Event *, void *);
Q_DECLARE_METATYPE(std::shared_ptr<LogReader>);
class Replay : public QObject {
Q_OBJECT
public:
Replay(QString route, QStringList allow, QStringList block, SubMaster *sm = nullptr,
uint32_t flags = REPLAY_FLAG_NONE, QString data_dir = "", QObject *parent = 0);
~Replay();
bool load();
void start(int seconds = 0);
void stop();
void pause(bool pause);
void seekToFlag(FindFlag flag);
void seekTo(double seconds, bool relative);
inline bool isPaused() const { return paused_; }
// the filter is called on the streaming thread; return quickly from it to avoid blocking streaming.
// the filter function must return true if the event should be filtered out, and false otherwise.
inline void installEventFilter(replayEventFilter filter, void *opaque) {
filter_opaque = opaque;
event_filter = filter;
}
inline int segmentCacheLimit() const { return segment_cache_limit; }
inline void setSegmentCacheLimit(int n) { segment_cache_limit = std::max(MIN_SEGMENTS_CACHE, n); }
inline bool hasFlag(REPLAY_FLAGS flag) const { return flags_ & flag; }
inline void addFlag(REPLAY_FLAGS flag) { flags_ |= flag; }
inline void removeFlag(REPLAY_FLAGS flag) { flags_ &= ~flag; }
inline const Route* route() const { return route_.get(); }
inline double currentSeconds() const { return double(cur_mono_time_ - route_start_ts_) / 1e9; }
inline QDateTime currentDateTime() const { return route_->datetime().addSecs(currentSeconds()); }
inline uint64_t routeStartTime() const { return route_start_ts_; }
inline double toSeconds(uint64_t mono_time) const { return (mono_time - route_start_ts_) / 1e9; }
inline int totalSeconds() const { return (!segments_.empty()) ? (segments_.rbegin()->first + 1) * 60 : 0; }
inline void setSpeed(float speed) { speed_ = speed; }
inline float getSpeed() const { return speed_; }
inline const std::vector<Event *> *events() const { return events_.get(); }
inline const std::map<int, std::unique_ptr<Segment>> &segments() const { return segments_; }
inline const std::string &carFingerprint() const { return car_fingerprint_; }
inline const std::vector<std::tuple<double, double, TimelineType>> getTimeline() {
std::lock_guard lk(timeline_lock);
return timeline;
}
signals:
void streamStarted();
void segmentsMerged();
void seekedTo(double sec);
void qLogLoaded(int segnum, std::shared_ptr<LogReader> qlog);
protected slots:
void segmentLoadFinished(bool success);
protected:
typedef std::map<int, std::unique_ptr<Segment>> SegmentMap;
std::optional<uint64_t> find(FindFlag flag);
void startStream(const Segment *cur_segment);
void stream();
void setCurrentSegment(int n);
void queueSegment();
void mergeSegments(const SegmentMap::iterator &begin, const SegmentMap::iterator &end);
void updateEvents(const std::function<bool()>& lambda);
void publishMessage(const Event *e);
void publishFrame(const Event *e);
void buildTimeline();
inline bool isSegmentMerged(int n) {
return std::find(segments_merged_.begin(), segments_merged_.end(), n) != segments_merged_.end();
}
QThread *stream_thread_ = nullptr;
std::mutex stream_lock_;
std::condition_variable stream_cv_;
std::atomic<bool> updating_events_ = false;
std::atomic<int> current_segment_ = 0;
SegmentMap segments_;
// the following variables must be protected with stream_lock_
std::atomic<bool> exit_ = false;
bool paused_ = false;
bool events_updated_ = false;
uint64_t route_start_ts_ = 0;
std::atomic<uint64_t> cur_mono_time_ = 0;
std::unique_ptr<std::vector<Event *>> events_;
std::unique_ptr<std::vector<Event *>> new_events_;
std::vector<int> segments_merged_;
// messaging
SubMaster *sm = nullptr;
std::unique_ptr<PubMaster> pm;
std::vector<const char*> sockets_;
std::unique_ptr<Route> route_;
std::unique_ptr<CameraServer> camera_server_;
std::atomic<uint32_t> flags_ = REPLAY_FLAG_NONE;
std::mutex timeline_lock;
QFuture<void> timeline_future;
std::vector<std::tuple<double, double, TimelineType>> timeline;
std::string car_fingerprint_;
std::atomic<float> speed_ = 1.0;
replayEventFilter event_filter = nullptr;
void *filter_opaque = nullptr;
int segment_cache_limit = MIN_SEGMENTS_CACHE;
};
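A short sketch (not part of the commit) of the installEventFilter() hook declared above: the filter runs on the streaming thread and must return true for events that should be dropped. drop_camera_odometry is a hypothetical name; cameraOdometry is only an example service.

// Hypothetical filter, for illustration only.
#include "tools/replay/replay.h"

static bool drop_camera_odometry(const Event *e, void *opaque) {
  (void)opaque;
  return e->which == cereal::Event::Which::CAMERA_ODOMETRY;
}

void attach_filter(Replay *replay) {
  replay->installEventFilter(drop_camera_odometry, nullptr);
}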

144
tools/replay/route.cc Normal file

@@ -0,0 +1,144 @@
#include "tools/replay/route.h"
#include <QDir>
#include <QEventLoop>
#include <QJsonArray>
#include <QJsonDocument>
#include <QRegExp>
#include <QtConcurrent>
#include <array>
#include "selfdrive/ui/qt/api.h"
#include "system/hardware/hw.h"
#include "tools/replay/replay.h"
#include "tools/replay/util.h"
Route::Route(const QString &route, const QString &data_dir) : data_dir_(data_dir) {
route_ = parseRoute(route);
}
RouteIdentifier Route::parseRoute(const QString &str) {
QRegExp rx(R"(^(?:([a-z0-9]{16})([|_/]))?(\d{4}-\d{2}-\d{2}--\d{2}-\d{2}-\d{2})(?:(--|/)(\d*))?$)");
if (rx.indexIn(str) == -1) return {};
const QStringList list = rx.capturedTexts();
return {.dongle_id = list[1], .timestamp = list[3], .segment_id = list[5].toInt(), .str = list[1] + "|" + list[3]};
}
bool Route::load() {
if (route_.str.isEmpty() || (data_dir_.isEmpty() && route_.dongle_id.isEmpty())) {
rInfo("invalid route format");
return false;
}
date_time_ = QDateTime::fromString(route_.timestamp, "yyyy-MM-dd--HH-mm-ss");
return data_dir_.isEmpty() ? loadFromServer() : loadFromLocal();
}
bool Route::loadFromServer() {
QEventLoop loop;
HttpRequest http(nullptr, !Hardware::PC());
QObject::connect(&http, &HttpRequest::requestDone, [&](const QString &json, bool success, QNetworkReply::NetworkError error) {
if (error == QNetworkReply::ContentAccessDenied || error == QNetworkReply::AuthenticationRequiredError) {
qWarning() << ">> Unauthorized. Authenticate with tools/lib/auth.py <<";
}
loop.exit(success ? loadFromJson(json) : 0);
});
http.sendRequest(CommaApi::BASE_URL + "/v1/route/" + route_.str + "/files");
return loop.exec();
}
bool Route::loadFromJson(const QString &json) {
QRegExp rx(R"(\/(\d+)\/)");
for (const auto &value : QJsonDocument::fromJson(json.trimmed().toUtf8()).object()) {
for (const auto &url : value.toArray()) {
QString url_str = url.toString();
if (rx.indexIn(url_str) != -1) {
addFileToSegment(rx.cap(1).toInt(), url_str);
}
}
}
return !segments_.empty();
}
bool Route::loadFromLocal() {
QDir log_dir(data_dir_);
for (const auto &folder : log_dir.entryList(QDir::Dirs | QDir::NoDot | QDir::NoDotDot, QDir::NoSort)) {
int pos = folder.lastIndexOf("--");
if (pos != -1 && folder.left(pos) == route_.timestamp) {
const int seg_num = folder.mid(pos + 2).toInt();
QDir segment_dir(log_dir.filePath(folder));
for (const auto &f : segment_dir.entryList(QDir::Files)) {
addFileToSegment(seg_num, segment_dir.absoluteFilePath(f));
}
}
}
return !segments_.empty();
}
void Route::addFileToSegment(int n, const QString &file) {
QString name = QUrl(file).fileName();
const int pos = name.lastIndexOf("--");
name = pos != -1 ? name.mid(pos + 2) : name;
if (name == "rlog.bz2" || name == "rlog") {
segments_[n].rlog = file;
} else if (name == "qlog.bz2" || name == "qlog") {
segments_[n].qlog = file;
} else if (name == "fcamera.hevc") {
segments_[n].road_cam = file;
} else if (name == "dcamera.hevc") {
segments_[n].driver_cam = file;
} else if (name == "ecamera.hevc") {
segments_[n].wide_road_cam = file;
} else if (name == "qcamera.ts") {
segments_[n].qcamera = file;
}
}
// class Segment
Segment::Segment(int n, const SegmentFile &files, uint32_t flags) : seg_num(n), flags(flags) {
  // [RoadCam, DriverCam, WideRoadCam, log]; falls back to qcamera/qlog when needed
const std::array file_list = {
(flags & REPLAY_FLAG_QCAMERA) || files.road_cam.isEmpty() ? files.qcamera : files.road_cam,
flags & REPLAY_FLAG_DCAM ? files.driver_cam : "",
flags & REPLAY_FLAG_ECAM ? files.wide_road_cam : "",
files.rlog.isEmpty() ? files.qlog : files.rlog,
};
for (int i = 0; i < file_list.size(); ++i) {
if (!file_list[i].isEmpty() && (!(flags & REPLAY_FLAG_NO_VIPC) || i >= MAX_CAMERAS)) {
++loading_;
synchronizer_.addFuture(QtConcurrent::run(this, &Segment::loadFile, i, file_list[i].toStdString()));
}
}
}
Segment::~Segment() {
disconnect();
abort_ = true;
synchronizer_.setCancelOnWait(true);
synchronizer_.waitForFinished();
}
void Segment::loadFile(int id, const std::string file) {
const bool local_cache = !(flags & REPLAY_FLAG_NO_FILE_CACHE);
bool success = false;
if (id < MAX_CAMERAS) {
frames[id] = std::make_unique<FrameReader>();
success = frames[id]->load(file, flags & REPLAY_FLAG_NO_HW_DECODER, &abort_, local_cache, 20 * 1024 * 1024, 3);
} else {
log = std::make_unique<LogReader>();
success = log->load(file, &abort_, local_cache, 0, 3);
}
if (!success) {
// abort all loading jobs.
abort_ = true;
}
if (--loading_ == 0) {
emit loadFinished(!abort_);
}
}
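// Minimal usage sketch (the route name below is an illustrative placeholder,
// not part of this file):
//
//   Route route("a2a0ccea32023010|2023-07-27--13-01-19");
//   if (route.load() && !route.segments().empty()) {
//     const auto &[n, files] = *route.segments().begin();
//     auto seg = std::make_unique<Segment>(n, files, REPLAY_FLAG_QCAMERA);
//     QObject::connect(seg.get(), &Segment::loadFinished,
//                      [](bool ok) { rInfo("segment loaded: %d", ok); });
//     // keep `seg` alive until loadFinished fires
//   }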

75
tools/replay/route.h Normal file

@@ -0,0 +1,75 @@
#pragma once
#include <map>
#include <memory>
#include <string>
#include <QDateTime>
#include <QFutureSynchronizer>
#include "tools/replay/framereader.h"
#include "tools/replay/logreader.h"
#include "tools/replay/util.h"
struct RouteIdentifier {
QString dongle_id;
QString timestamp;
int segment_id;
QString str;
};
struct SegmentFile {
QString rlog;
QString qlog;
QString road_cam;
QString driver_cam;
QString wide_road_cam;
QString qcamera;
};
class Route {
public:
Route(const QString &route, const QString &data_dir = {});
bool load();
inline const QString &name() const { return route_.str; }
inline const QDateTime datetime() const { return date_time_; }
inline const QString &dir() const { return data_dir_; }
inline const RouteIdentifier &identifier() const { return route_; }
inline const std::map<int, SegmentFile> &segments() const { return segments_; }
inline const SegmentFile &at(int n) { return segments_.at(n); }
static RouteIdentifier parseRoute(const QString &str);
protected:
bool loadFromLocal();
bool loadFromServer();
bool loadFromJson(const QString &json);
void addFileToSegment(int seg_num, const QString &file);
RouteIdentifier route_ = {};
QString data_dir_;
std::map<int, SegmentFile> segments_;
QDateTime date_time_;
};
class Segment : public QObject {
Q_OBJECT
public:
Segment(int n, const SegmentFile &files, uint32_t flags);
~Segment();
inline bool isLoaded() const { return !loading_ && !abort_; }
const int seg_num = 0;
std::unique_ptr<LogReader> log;
std::unique_ptr<FrameReader> frames[MAX_CAMERAS] = {};
signals:
void loadFinished(bool success);
protected:
void loadFile(int id, const std::string file);
std::atomic<bool> abort_ = false;
std::atomic<int> loading_ = 0;
QFutureSynchronizer<void> synchronizer_;
uint32_t flags;
};

331
tools/replay/util.cc Normal file

@@ -0,0 +1,331 @@
#include "tools/replay/util.h"
#include <bzlib.h>
#include <curl/curl.h>
#include <openssl/sha.h>
#include <algorithm>
#include <cassert>
#include <cmath>
#include <cstdarg>
#include <cstring>
#include <fstream>
#include <iostream>
#include <map>
#include <mutex>
#include <numeric>
#include <thread>
#include <utility>
#include "common/timing.h"
#include "common/util.h"
ReplayMessageHandler message_handler = nullptr;
void installMessageHandler(ReplayMessageHandler handler) { message_handler = handler; }
void logMessage(ReplyMsgType type, const char *fmt, ...) {
static std::mutex lock;
std::lock_guard lk(lock);
char *msg_buf = nullptr;
va_list args;
va_start(args, fmt);
int ret = vasprintf(&msg_buf, fmt, args);
va_end(args);
if (ret <= 0 || !msg_buf) return;
if (message_handler) {
message_handler(type, msg_buf);
} else {
if (type == ReplyMsgType::Debug) {
std::cout << "\033[38;5;248m" << msg_buf << "\033[00m" << std::endl;
} else if (type == ReplyMsgType::Warning) {
std::cout << "\033[38;5;227m" << msg_buf << "\033[00m" << std::endl;
} else if (type == ReplyMsgType::Critical) {
std::cout << "\033[38;5;196m" << msg_buf << "\033[00m" << std::endl;
} else {
std::cout << msg_buf << std::endl;
}
}
free(msg_buf);
}
namespace {
struct CURLGlobalInitializer {
CURLGlobalInitializer() { curl_global_init(CURL_GLOBAL_DEFAULT); }
~CURLGlobalInitializer() { curl_global_cleanup(); }
};
static CURLGlobalInitializer curl_initializer;
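// Each MultiPartWriter owns a disjoint byte range [offset, end) of a
// preallocated buffer (std::string) or file (std::ofstream), so the ranged
// transfers below can write concurrently without any locking.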
template <class T>
struct MultiPartWriter {
T *buf;
size_t *total_written;
size_t offset;
size_t end;
size_t write(char *data, size_t size, size_t count) {
size_t bytes = size * count;
if ((offset + bytes) > end) return 0;
if constexpr (std::is_same<T, std::string>::value) {
memcpy(buf->data() + offset, data, bytes);
} else if constexpr (std::is_same<T, std::ofstream>::value) {
buf->seekp(offset);
buf->write(data, bytes);
}
offset += bytes;
*total_written += bytes;
return bytes;
}
};
template <class T>
size_t write_cb(char *data, size_t size, size_t count, void *userp) {
auto w = (MultiPartWriter<T> *)userp;
return w->write(data, size, count);
}
size_t dummy_write_cb(char *data, size_t size, size_t count, void *userp) { return size * count; }
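// Aggregates per-URL download progress and invokes the installed handler at
// most every ~500 ms, plus once on failure or when everything has been received.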
struct DownloadStats {
void installDownloadProgressHandler(DownloadProgressHandler handler) {
std::lock_guard lk(lock);
download_progress_handler = handler;
}
void add(const std::string &url, uint64_t total_bytes) {
std::lock_guard lk(lock);
items[url] = {0, total_bytes};
}
void remove(const std::string &url) {
std::lock_guard lk(lock);
items.erase(url);
}
void update(const std::string &url, uint64_t downloaded, bool success = true) {
std::lock_guard lk(lock);
items[url].first = downloaded;
auto stat = std::accumulate(items.begin(), items.end(), std::pair<int, int>{}, [=](auto &a, auto &b){
return std::pair{a.first + b.second.first, a.second + b.second.second};
});
double tm = millis_since_boot();
if (download_progress_handler && ((tm - prev_tm) > 500 || !success || stat.first >= stat.second)) {
download_progress_handler(stat.first, stat.second, success);
prev_tm = tm;
}
}
std::mutex lock;
std::map<std::string, std::pair<uint64_t, uint64_t>> items;
double prev_tm = 0;
DownloadProgressHandler download_progress_handler = nullptr;
};
static DownloadStats download_stats;
} // namespace
void installDownloadProgressHandler(DownloadProgressHandler handler) {
download_stats.installDownloadProgressHandler(handler);
}
std::string formattedDataSize(size_t size) {
if (size < 1024) {
return std::to_string(size) + " B";
} else if (size < 1024 * 1024) {
return util::string_format("%.2f KB", (float)size / 1024);
} else {
return util::string_format("%.2f MB", (float)size / (1024 * 1024));
}
}
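// Issues a headers-only request (CURLOPT_NOBODY) via the multi interface so it
// can be polled and aborted; returns 0 if the server does not report a size.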
size_t getRemoteFileSize(const std::string &url, std::atomic<bool> *abort) {
CURL *curl = curl_easy_init();
if (!curl) return -1;
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
  curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, dummy_write_cb);
curl_easy_setopt(curl, CURLOPT_HEADER, 1);
curl_easy_setopt(curl, CURLOPT_NOBODY, 1);
CURLM *cm = curl_multi_init();
curl_multi_add_handle(cm, curl);
int still_running = 1;
while (still_running > 0 && !(abort && *abort)) {
CURLMcode mc = curl_multi_perform(cm, &still_running);
if (!mc) curl_multi_wait(cm, nullptr, 0, 1000, nullptr);
}
double content_length = -1;
curl_easy_getinfo(curl, CURLINFO_CONTENT_LENGTH_DOWNLOAD, &content_length);
curl_multi_remove_handle(cm, curl);
curl_easy_cleanup(curl);
curl_multi_cleanup(cm);
return content_length > 0 ? (size_t)content_length : 0;
}
std::string getUrlWithoutQuery(const std::string &url) {
size_t idx = url.find("?");
return (idx == std::string::npos ? url : url.substr(0, idx));
}
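// Downloads `url` into `buf` (an in-memory string or an open std::ofstream).
// Large files (>10 MB) are split into up to 5 ranged requests of roughly
// `chunk_size` bytes each, issued concurrently through curl's multi interface;
// a part only counts as complete when the server answers 206 Partial Content.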
template <class T>
bool httpDownload(const std::string &url, T &buf, size_t chunk_size, size_t content_length, std::atomic<bool> *abort) {
download_stats.add(url, content_length);
int parts = 1;
if (chunk_size > 0 && content_length > 10 * 1024 * 1024) {
parts = std::nearbyint(content_length / (float)chunk_size);
parts = std::clamp(parts, 1, 5);
}
CURLM *cm = curl_multi_init();
size_t written = 0;
std::map<CURL *, MultiPartWriter<T>> writers;
const int part_size = content_length / parts;
for (int i = 0; i < parts; ++i) {
CURL *eh = curl_easy_init();
writers[eh] = {
.buf = &buf,
.total_written = &written,
.offset = (size_t)(i * part_size),
.end = i == parts - 1 ? content_length : (i + 1) * part_size,
};
curl_easy_setopt(eh, CURLOPT_WRITEFUNCTION, write_cb<T>);
curl_easy_setopt(eh, CURLOPT_WRITEDATA, (void *)(&writers[eh]));
curl_easy_setopt(eh, CURLOPT_URL, url.c_str());
    curl_easy_setopt(eh, CURLOPT_RANGE, util::string_format("%zu-%zu", writers[eh].offset, writers[eh].end - 1).c_str());
curl_easy_setopt(eh, CURLOPT_HTTPGET, 1);
curl_easy_setopt(eh, CURLOPT_NOSIGNAL, 1);
curl_easy_setopt(eh, CURLOPT_FOLLOWLOCATION, 1);
curl_multi_add_handle(cm, eh);
}
int still_running = 1;
while (still_running > 0 && !(abort && *abort)) {
curl_multi_wait(cm, nullptr, 0, 1000, nullptr);
curl_multi_perform(cm, &still_running);
download_stats.update(url, written);
}
CURLMsg *msg;
int msgs_left = -1;
int complete = 0;
while ((msg = curl_multi_info_read(cm, &msgs_left)) && !(abort && *abort)) {
if (msg->msg == CURLMSG_DONE) {
if (msg->data.result == CURLE_OK) {
long res_status = 0;
curl_easy_getinfo(msg->easy_handle, CURLINFO_RESPONSE_CODE, &res_status);
if (res_status == 206) {
complete++;
} else {
          rWarning("Download failed: http error code: %ld", res_status);
}
} else {
rWarning("Download failed: connection failure: %d", msg->data.result);
}
}
}
bool success = complete == parts;
download_stats.update(url, written, success);
download_stats.remove(url);
for (const auto &[e, w] : writers) {
curl_multi_remove_handle(cm, e);
curl_easy_cleanup(e);
}
curl_multi_cleanup(cm);
return success;
}
std::string httpGet(const std::string &url, size_t chunk_size, std::atomic<bool> *abort) {
size_t size = getRemoteFileSize(url, abort);
if (size == 0) return {};
std::string result(size, '\0');
return httpDownload(url, result, chunk_size, size, abort) ? result : "";
}
bool httpDownload(const std::string &url, const std::string &file, size_t chunk_size, std::atomic<bool> *abort) {
size_t size = getRemoteFileSize(url, abort);
if (size == 0) return false;
std::ofstream of(file, std::ios::binary | std::ios::out);
of.seekp(size - 1).write("\0", 1);
return httpDownload(url, of, chunk_size, size, abort);
}
std::string decompressBZ2(const std::string &in, std::atomic<bool> *abort) {
return decompressBZ2((std::byte *)in.data(), in.size(), abort);
}
std::string decompressBZ2(const std::byte *in, size_t in_size, std::atomic<bool> *abort) {
if (in_size == 0) return {};
bz_stream strm = {};
int bzerror = BZ2_bzDecompressInit(&strm, 0, 0);
assert(bzerror == BZ_OK);
strm.next_in = (char *)in;
strm.avail_in = in_size;
std::string out(in_size * 5, '\0');
do {
strm.next_out = (char *)(&out[strm.total_out_lo32]);
strm.avail_out = out.size() - strm.total_out_lo32;
const char *prev_write_pos = strm.next_out;
bzerror = BZ2_bzDecompress(&strm);
if (bzerror == BZ_OK && prev_write_pos == strm.next_out) {
// content is corrupt
bzerror = BZ_STREAM_END;
      rWarning("decompressBZ2 error: content is corrupt");
break;
}
if (bzerror == BZ_OK && strm.avail_in > 0 && strm.avail_out == 0) {
out.resize(out.size() * 2);
}
} while (bzerror == BZ_OK && !(abort && *abort));
BZ2_bzDecompressEnd(&strm);
if (bzerror == BZ_STREAM_END && !(abort && *abort)) {
out.resize(strm.total_out_lo32);
return out;
}
return {};
}
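// Sleeps in ~1 ms increments while more than 1 ms remains, then spin-waits
// (yielding) for the final stretch to get sub-millisecond accuracy.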
void precise_nano_sleep(long sleep_ns) {
const long estimate_ns = 1 * 1e6; // 1ms
struct timespec req = {.tv_nsec = estimate_ns};
uint64_t start_sleep = nanos_since_boot();
while (sleep_ns > estimate_ns) {
nanosleep(&req, nullptr);
uint64_t end_sleep = nanos_since_boot();
sleep_ns -= (end_sleep - start_sleep);
start_sleep = end_sleep;
}
// spin wait
if (sleep_ns > 0) {
while ((nanos_since_boot() - start_sleep) <= sleep_ns) {
std::this_thread::yield();
}
}
}
std::string sha256(const std::string &str) {
unsigned char hash[SHA256_DIGEST_LENGTH];
SHA256_CTX sha256;
SHA256_Init(&sha256);
SHA256_Update(&sha256, str.c_str(), str.size());
SHA256_Final(hash, &sha256);
return util::hexdump(hash, SHA256_DIGEST_LENGTH);
}

34
tools/replay/util.h Normal file

@@ -0,0 +1,34 @@
#pragma once
#include <atomic>
#include <functional>
#include <string>
enum class ReplyMsgType {
Info,
Debug,
Warning,
Critical
};
typedef std::function<void(ReplyMsgType type, const std::string msg)> ReplayMessageHandler;
void installMessageHandler(ReplayMessageHandler);
void logMessage(ReplyMsgType type, const char* fmt, ...);
#define rInfo(fmt, ...) ::logMessage(ReplyMsgType::Info, fmt, ## __VA_ARGS__)
#define rDebug(fmt, ...) ::logMessage(ReplyMsgType::Debug, fmt, ## __VA_ARGS__)
#define rWarning(fmt, ...) ::logMessage(ReplyMsgType::Warning, fmt, ## __VA_ARGS__)
#define rError(fmt, ...) ::logMessage(ReplyMsgType::Critical, fmt, ## __VA_ARGS__)
std::string sha256(const std::string &str);
void precise_nano_sleep(long sleep_ns);
std::string decompressBZ2(const std::string &in, std::atomic<bool> *abort = nullptr);
std::string decompressBZ2(const std::byte *in, size_t in_size, std::atomic<bool> *abort = nullptr);
std::string getUrlWithoutQuery(const std::string &url);
size_t getRemoteFileSize(const std::string &url, std::atomic<bool> *abort = nullptr);
std::string httpGet(const std::string &url, size_t chunk_size = 0, std::atomic<bool> *abort = nullptr);
typedef std::function<void(uint64_t cur, uint64_t total, bool success)> DownloadProgressHandler;
void installDownloadProgressHandler(DownloadProgressHandler);
bool httpDownload(const std::string &url, const std::string &file, size_t chunk_size = 0, std::atomic<bool> *abort = nullptr);
std::string formattedDataSize(size_t size);
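// Minimal usage sketch of these helpers (the URL is an illustrative
// placeholder, not a real endpoint):
//
//   installDownloadProgressHandler([](uint64_t cur, uint64_t total, bool ok) {
//     rDebug("downloaded %s / %s", formattedDataSize(cur).c_str(),
//            formattedDataSize(total).c_str());
//   });
//   std::string compressed = httpGet("https://example.com/rlog.bz2");
//   std::string raw = compressed.empty() ? "" : decompressBZ2(compressed);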