commit ea1aad5ed1 (parent 21363ce751)
Author: Your Name
Date: 2024-04-27 13:43:16 -05:00

128 changed files with 3533 additions and 1918 deletions

selfdrive/ui/tests/.gitignore

@@ -0,0 +1,6 @@
test
playsound
test_sound
test_translations
ui_snapshot
test_ui/report


@@ -0,0 +1,22 @@
#!/usr/bin/env python3
import time

import cereal.messaging as messaging

if __name__ == "__main__":
  while True:
    pm = messaging.PubMaster(['carParams', 'carState'])
    batt = 1.

    while True:
      msg = messaging.new_message('carParams')
      msg.carParams.carName = "COMMA BODY"
      msg.carParams.notCar = True
      pm.send('carParams', msg)

      for b in range(100, 0, -1):
        msg = messaging.new_message('carState')
        msg.carState.charging = True
        msg.carState.fuelGauge = b / 100.
        pm.send('carState', msg)
        time.sleep(0.1)

      time.sleep(1)


@@ -0,0 +1,18 @@
#!/bin/bash
set -e
UI_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"/..
TEST_TEXT="(WRAPPED_SOURCE_TEXT)"
TEST_TS_FILE=$UI_DIR/translations/main_test_en.ts
TEST_QM_FILE=$UI_DIR/translations/main_test_en.qm
# translation strings
UNFINISHED="<translation type=\"unfinished\"><\/translation>"
TRANSLATED="<translation>$TEST_TEXT<\/translation>"
mkdir -p $UI_DIR/translations
rm -f $TEST_TS_FILE $TEST_QM_FILE
lupdate -recursive "$UI_DIR" -ts $TEST_TS_FILE
sed -i "s/$UNFINISHED/$TRANSLATED/" $TEST_TS_FILE
lrelease $TEST_TS_FILE


@@ -0,0 +1,36 @@
#!/usr/bin/env python3
import os
import sys
import time
import json

from openpilot.common.basedir import BASEDIR
from openpilot.common.params import Params
from openpilot.selfdrive.controls.lib.alertmanager import set_offroad_alert

if __name__ == "__main__":
  params = Params()

  with open(os.path.join(BASEDIR, "selfdrive/controls/lib/alerts_offroad.json")) as f:
    offroad_alerts = json.load(f)

  t = 10 if len(sys.argv) < 2 else int(sys.argv[1])
  while True:
    print("setting alert update")
    params.put_bool("UpdateAvailable", True)
    r = open(os.path.join(BASEDIR, "RELEASES.md")).read()
    r = r[:r.find('\n\n')]  # Slice latest release notes
    params.put("UpdaterNewReleaseNotes", r + "\n")
    time.sleep(t)
    params.put_bool("UpdateAvailable", False)

    # cycle through normal alerts
    for a in offroad_alerts:
      print("setting alert:", a)
      set_offroad_alert(a, True)
      time.sleep(t)
      set_offroad_alert(a, False)

    print("no alert")
    time.sleep(t)


@@ -0,0 +1,30 @@
#include <QApplication>
#include <QSoundEffect>
#include <QTimer>
#include <QDebug>

int main(int argc, char **argv) {
  QApplication a(argc, argv);

  QTimer::singleShot(0, [=]{
    QSoundEffect s;
    const char *vol = getenv("VOLUME");
    s.setVolume(vol ? atof(vol) : 1.0);

    for (int i = 1; i < argc; i++) {
      QString fn = argv[i];
      qDebug() << "playing" << fn;

      QEventLoop loop;
      s.setSource(QUrl::fromLocalFile(fn));
      QEventLoop::connect(&s, &QSoundEffect::loadedChanged, &loop, &QEventLoop::quit);
      loop.exec();
      s.play();
      QEventLoop::connect(&s, &QSoundEffect::playingChanged, &loop, &QEventLoop::quit);
      loop.exec();
    }

    QCoreApplication::exit();
  });

  return a.exec();
}


@@ -0,0 +1,25 @@
#define CATCH_CONFIG_RUNNER
#include "catch2/catch.hpp"

#include <QApplication>
#include <QDebug>
#include <QDir>
#include <QTranslator>

int main(int argc, char **argv) {
  // unit tests for Qt
  QApplication app(argc, argv);

  QString language_file = "main_test_en";
  qDebug() << "Loading language:" << language_file;

  QTranslator translator;
  QString translationsPath = QDir::cleanPath(qApp->applicationDirPath() + "/../translations");
  if (!translator.load(language_file, translationsPath)) {
    qDebug() << "Failed to load translation file!";
  }
  app.installTranslator(&translator);

  const int res = Catch::Session().run(argc, argv);
  return (res < 0xff ? res : 0xff);
}


@@ -0,0 +1,41 @@
#!/usr/bin/env python3
import time
import unittest

from cereal import car
from cereal import messaging
from cereal.messaging import SubMaster, PubMaster

from openpilot.selfdrive.ui.soundd import CONTROLS_TIMEOUT, check_controls_timeout_alert

AudibleAlert = car.CarControl.HUDControl.AudibleAlert


class TestSoundd(unittest.TestCase):
  def test_check_controls_timeout_alert(self):
    sm = SubMaster(['controlsState'])
    pm = PubMaster(['controlsState'])

    for _ in range(100):
      cs = messaging.new_message('controlsState')
      cs.controlsState.enabled = True
      pm.send("controlsState", cs)
      time.sleep(0.01)

      sm.update(0)

      self.assertFalse(check_controls_timeout_alert(sm))

    for _ in range(CONTROLS_TIMEOUT * 110):
      sm.update(0)
      time.sleep(0.01)

    self.assertTrue(check_controls_timeout_alert(sm))

  # TODO: add test with micd for checking that soundd actually outputs sounds


if __name__ == "__main__":
  unittest.main()


@@ -0,0 +1,48 @@
#include "catch2/catch.hpp"
#include "common/params.h"
#include "selfdrive/ui/qt/window.h"
const QString TEST_TEXT = "(WRAPPED_SOURCE_TEXT)"; // what each string should be translated to
QRegExp RE_NUM("\\d*");
QStringList getParentWidgets(QWidget* widget){
QStringList parentWidgets;
while (widget->parentWidget() != Q_NULLPTR) {
widget = widget->parentWidget();
parentWidgets.append(widget->metaObject()->className());
}
return parentWidgets;
}
template <typename T>
void checkWidgetTrWrap(MainWindow &w) {
for (auto widget : w.findChildren<T>()) {
const QString text = widget->text();
bool isNumber = RE_NUM.exactMatch(text);
bool wrapped = text.contains(TEST_TEXT);
QString parentWidgets = getParentWidgets(widget).join("->");
if (!text.isEmpty() && !isNumber && !wrapped) {
FAIL(("\"" + text + "\" must be wrapped. Parent widgets: " + parentWidgets).toStdString());
}
// warn if source string wrapped, but UI adds text
// TODO: add way to ignore this
if (wrapped && text != TEST_TEXT) {
WARN(("\"" + text + "\" is dynamic and needs a custom retranslate function. Parent widgets: " + parentWidgets).toStdString());
}
}
}
// Tests all strings in the UI are wrapped with tr()
TEST_CASE("UI: test all strings wrapped") {
Params().remove("LanguageSetting");
Params().remove("HardwareSerial");
Params().remove("DongleId");
qputenv("TICI", "1");
MainWindow w;
checkWidgetTrWrap<QPushButton*>(w);
checkWidgetTrWrap<QLabel*>(w);
}


@@ -12,7 +12,7 @@ from parameterized import parameterized_class
 from openpilot.selfdrive.ui.update_translations import TRANSLATIONS_DIR, LANGUAGES_FILE, update_translations

-with open(LANGUAGES_FILE, "r") as f:
+with open(LANGUAGES_FILE) as f:
   translation_files = json.load(f)

 UNFINISHED_TRANSLATION_TAG = "<translation type=\"unfinished\""  # non-empty translations can be marked unfinished
@@ -28,7 +28,7 @@ class TestTranslations(unittest.TestCase):
   @staticmethod
   def _read_translation_file(path, file):
     tr_file = os.path.join(path, f"{file}.ts")
-    with open(tr_file, "r") as f:
+    with open(tr_file) as f:
       return f.read()

   def test_missing_translation_files(self):
@@ -83,7 +83,7 @@ class TestTranslations(unittest.TestCase):
         for nf in numerusform:
           self.assertIsNotNone(nf, f"Ensure all plural translation forms are completed: {source_text}")
           self.assertIn("%n", nf, "Ensure numerus argument (%n) exists in translation.")
-          self.assertIsNone(FORMAT_ARG.search(nf), "Plural translations must use %n, not %1, %2, etc.: {}".format(numerusform))
+          self.assertIsNone(FORMAT_ARG.search(nf), f"Plural translations must use %n, not %1, %2, etc.: {numerusform}")
       else:
         self.assertIsNotNone(translation.text, f"Ensure translation is completed: {source_text}")


@@ -0,0 +1,198 @@
from collections import namedtuple
import pathlib
import shutil
import sys
import jinja2
import matplotlib.pyplot as plt
import numpy as np
import os
import pywinctl
import time
import unittest

from parameterized import parameterized

from cereal import messaging, car, log
from cereal.visionipc import VisionIpcServer, VisionStreamType
from cereal.messaging import SubMaster, PubMaster
from openpilot.common.mock import mock_messages
from openpilot.common.params import Params
from openpilot.common.realtime import DT_MDL
from openpilot.common.transformations.camera import DEVICE_CAMERAS
from openpilot.selfdrive.test.helpers import with_processes
from openpilot.selfdrive.test.process_replay.vision_meta import meta_from_camera_state
from openpilot.tools.webcam.camera import Camera

UI_DELAY = 0.5  # may be slower on CI?

NetworkType = log.DeviceState.NetworkType
NetworkStrength = log.DeviceState.NetworkStrength

EventName = car.CarEvent.EventName
EVENTS_BY_NAME = {v: k for k, v in EventName.schema.enumerants.items()}


def setup_common(click, pm: PubMaster):
  Params().put("DongleId", "123456789012345")
  dat = messaging.new_message('deviceState')
  dat.deviceState.started = True
  dat.deviceState.networkType = NetworkType.cell4G
  dat.deviceState.networkStrength = NetworkStrength.moderate
  dat.deviceState.freeSpacePercent = 80
  dat.deviceState.memoryUsagePercent = 2
  dat.deviceState.cpuTempC = [2,]*3
  dat.deviceState.gpuTempC = [2,]*3
  dat.deviceState.cpuUsagePercent = [2,]*8

  pm.send("deviceState", dat)


def setup_homescreen(click, pm: PubMaster):
  setup_common(click, pm)


def setup_settings_device(click, pm: PubMaster):
  setup_common(click, pm)

  click(100, 100)


def setup_settings_network(click, pm: PubMaster):
  setup_common(click, pm)

  setup_settings_device(click, pm)
  click(300, 600)


def setup_onroad(click, pm: PubMaster):
  setup_common(click, pm)

  dat = messaging.new_message('pandaStates', 1)
  dat.pandaStates[0].ignitionLine = True
  dat.pandaStates[0].pandaType = log.PandaState.PandaType.uno

  pm.send("pandaStates", dat)

  d = DEVICE_CAMERAS[("tici", "ar0231")]
  server = VisionIpcServer("camerad")
  server.create_buffers(VisionStreamType.VISION_STREAM_ROAD, 40, False, d.fcam.width, d.fcam.height)
  server.create_buffers(VisionStreamType.VISION_STREAM_DRIVER, 40, False, d.dcam.width, d.dcam.height)
  server.create_buffers(VisionStreamType.VISION_STREAM_WIDE_ROAD, 40, False, d.fcam.width, d.fcam.height)
  server.start_listener()

  time.sleep(0.5)  # give time for vipc server to start

  IMG = Camera.bgr2nv12(np.random.randint(0, 255, (d.fcam.width, d.fcam.height, 3), dtype=np.uint8))
  IMG_BYTES = IMG.flatten().tobytes()

  cams = ('roadCameraState', 'wideRoadCameraState')

  frame_id = 0
  for cam in cams:
    msg = messaging.new_message(cam)
    cs = getattr(msg, cam)
    cs.frameId = frame_id
    cs.timestampSof = int((frame_id * DT_MDL) * 1e9)
    cs.timestampEof = int((frame_id * DT_MDL) * 1e9)
    cam_meta = meta_from_camera_state(cam)

    pm.send(msg.which(), msg)
    server.send(cam_meta.stream, IMG_BYTES, cs.frameId, cs.timestampSof, cs.timestampEof)


@mock_messages(['liveLocationKalman'])
def setup_onroad_map(click, pm: PubMaster):
  setup_onroad(click, pm)

  click(500, 500)

  time.sleep(UI_DELAY)  # give time for the map to render


def setup_onroad_sidebar(click, pm: PubMaster):
  setup_onroad_map(click, pm)

  click(500, 500)


CASES = {
  "homescreen": setup_homescreen,
  "settings_device": setup_settings_device,
  "settings_network": setup_settings_network,
  "onroad": setup_onroad,
  "onroad_map": setup_onroad_map,
  "onroad_sidebar": setup_onroad_sidebar
}

TEST_DIR = pathlib.Path(__file__).parent
TEST_OUTPUT_DIR = TEST_DIR / "report"
SCREENSHOTS_DIR = TEST_OUTPUT_DIR / "screenshots"


class TestUI(unittest.TestCase):
  @classmethod
  def setUpClass(cls):
    os.environ["SCALE"] = "1"
    sys.modules["mouseinfo"] = False

  @classmethod
  def tearDownClass(cls):
    del sys.modules["mouseinfo"]

  def setup(self):
    self.sm = SubMaster(["uiDebug"])
    self.pm = PubMaster(["deviceState", "pandaStates", "controlsState", 'roadCameraState', 'wideRoadCameraState', 'liveLocationKalman'])

    while not self.sm.valid["uiDebug"]:
      self.sm.update(1)

    time.sleep(UI_DELAY)  # wait a bit more for the UI to start rendering
    try:
      self.ui = pywinctl.getWindowsWithTitle("ui")[0]
    except Exception as e:
      print(f"failed to find ui window, assuming that it's in the top left (for Xvfb) {e}")
      self.ui = namedtuple("bb", ["left", "top", "width", "height"])(0, 0, 2160, 1080)

  def screenshot(self):
    import pyautogui
    im = pyautogui.screenshot(region=(self.ui.left, self.ui.top, self.ui.width, self.ui.height))
    self.assertEqual(im.width, 2160)
    self.assertEqual(im.height, 1080)
    img = np.array(im)
    im.close()
    return img

  def click(self, x, y, *args, **kwargs):
    import pyautogui
    pyautogui.click(self.ui.left + x, self.ui.top + y, *args, **kwargs)
    time.sleep(UI_DELAY)  # give enough time for the UI to react

  @parameterized.expand(CASES.items())
  @with_processes(["ui"])
  def test_ui(self, name, setup_case):
    self.setup()

    setup_case(self.click, self.pm)

    time.sleep(UI_DELAY)  # wait a bit more for the UI to finish rendering

    im = self.screenshot()
    plt.imsave(SCREENSHOTS_DIR / f"{name}.png", im)


def create_html_report():
  OUTPUT_FILE = TEST_OUTPUT_DIR / "index.html"

  with open(TEST_DIR / "template.html") as f:
    template = jinja2.Template(f.read())

  cases = {f.stem: (str(f.relative_to(TEST_OUTPUT_DIR)), "reference.png") for f in SCREENSHOTS_DIR.glob("*.png")}
  cases = dict(sorted(cases.items()))

  with open(OUTPUT_FILE, "w") as f:
    f.write(template.render(cases=cases))


def create_screenshots():
  if TEST_OUTPUT_DIR.exists():
    shutil.rmtree(TEST_OUTPUT_DIR)

  SCREENSHOTS_DIR.mkdir(parents=True)
  unittest.main(exit=False)


if __name__ == "__main__":
  print("creating test screenshots")
  create_screenshots()

  print("creating html report")
  create_html_report()


@@ -0,0 +1,34 @@
<html>
<style>
  .column {
    float: left;
    width: 50%;
    padding: 5px;
  }

  .row::after {
    content: "";
    clear: both;
    display: table;
  }

  .image {
    width: 100%;
  }
</style>

{% for name, (image, ref_image) in cases.items() %}
  <h1>{{name}}</h1>
  <div class="row">
    <div class="column">
      <img class="image" src="{{ image }}" />
    </div>
  </div>
  <br>
{% endfor %}
</html>


@@ -0,0 +1,66 @@
#include "selfdrive/ui/tests/ui_snapshot.h"
#include <QApplication>
#include <QCommandLineParser>
#include <QDir>
#include <QImage>
#include <QPainter>
#include "selfdrive/ui/qt/home.h"
#include "selfdrive/ui/qt/util.h"
#include "selfdrive/ui/qt/window.h"
#include "selfdrive/ui/ui.h"
void saveWidgetAsImage(QWidget *widget, const QString &fileName) {
QImage image(widget->size(), QImage::Format_ARGB32);
QPainter painter(&image);
widget->render(&painter);
image.save(fileName);
}
int main(int argc, char *argv[]) {
initApp(argc, argv);
QApplication app(argc, argv);
QCommandLineParser parser;
parser.setApplicationDescription("Take a snapshot of the UI.");
parser.addHelpOption();
parser.addOption(QCommandLineOption(QStringList() << "o"
<< "output",
"Output image file path. The file's suffix is used to "
"determine the format. Supports PNG and JPEG formats. "
"Defaults to \"snapshot.png\".",
"file", "snapshot.png"));
parser.process(app);
const QString output = parser.value("output");
if (output.isEmpty()) {
qCritical() << "No output file specified";
return 1;
}
auto current = QDir::current();
// change working directory to find assets
if (!QDir::setCurrent(QCoreApplication::applicationDirPath() + QDir::separator() + "..")) {
qCritical() << "Failed to set current directory";
return 1;
}
MainWindow w;
w.setFixedSize(2160, 1080);
w.show();
app.installEventFilter(&w);
// restore working directory
QDir::setCurrent(current.absolutePath());
// wait for the UI to update
QObject::connect(uiState(), &UIState::uiUpdate, [&](const UIState &s) {
saveWidgetAsImage(&w, output);
app.quit();
});
return app.exec();
}


@@ -0,0 +1,5 @@
#pragma once

#include <QWidget>

void saveWidgetAsImage(QWidget *widget, const QString &fileName);