openpilot v0.9.6 release
date: 2024-01-12T10:13:37 master commit: ba792d576a49a0899b88a753fa1c52956bedf9e6
This commit is contained in:
0
tools/__init__.py
Normal file
0
tools/__init__.py
Normal file
103
tools/bodyteleop/static/index.html
Normal file
103
tools/bodyteleop/static/index.html
Normal file
@@ -0,0 +1,103 @@
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8"/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>commabody</title>
|
||||
<link rel="stylesheet" href="/static/main.css">
|
||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/bootstrap/5.2.3/css/bootstrap.min.css" integrity="sha512-SbiR/eusphKoMVVXysTKG/7VseWii+Y3FdHrt0EpKgpToZeemhqHeZeLWLhJutz/2ut2Vw1uQEj2MbRF+TVBUA==" crossorigin="anonymous" referrerpolicy="no-referrer" /><script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
|
||||
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.10.5/font/bootstrap-icons.css">
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/bootstrap/5.2.3/js/bootstrap.min.js" integrity="sha512-1/RvZTcCDEUjY/CypiMz+iqqtaoQfAITmNSJY17Myp4Ms5mdxPS5UV7iOfdZoxcGhzFbOm6sntTKJppjvuhg4g==" crossorigin="anonymous" referrerpolicy="no-referrer"></script>
|
||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.29.1/moment.min.js"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/chart.js@^3"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/moment@^2"></script>
|
||||
<script src="https://cdn.jsdelivr.net/npm/chartjs-adapter-moment@^1"></script>
|
||||
</head>
|
||||
<body>
|
||||
<div id="main">
|
||||
<p class="jumbo">comma body</p>
|
||||
<audio id="audio" autoplay="true"></audio>
|
||||
<video id="video" playsinline autoplay muted loop poster="/static/poster.png"></video>
|
||||
<div id="icon-panel" class="row">
|
||||
<div class="col-sm-12 col-md-6 details">
|
||||
<div class="icon-sup-panel col-12">
|
||||
<div class="icon-sub-panel">
|
||||
<div class="icon-sub-sub-panel">
|
||||
<i class="bi bi-speaker-fill pre-blob"></i>
|
||||
<i class="bi bi-mic-fill pre-blob"></i>
|
||||
<i class="bi bi-camera-video-fill pre-blob"></i>
|
||||
</div>
|
||||
<p class="small">body</p>
|
||||
</div>
|
||||
<div class="icon-sub-panel">
|
||||
<div class="icon-sub-sub-panel">
|
||||
<i class="bi bi-speaker-fill pre-blob"></i>
|
||||
<i class="bi bi-mic-fill pre-blob"></i>
|
||||
</div>
|
||||
<p class="small">you</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-sm-12 col-md-6 details">
|
||||
<div class="icon-sup-panel col-12">
|
||||
<div class="icon-sub-panel">
|
||||
<div class="icon-sub-sub-panel">
|
||||
<i id="ping-time" class="pre-blob1">-</i>
|
||||
</div>
|
||||
<p class="bi bi-arrow-repeat small"> ping time</p>
|
||||
</div>
|
||||
<div class="icon-sub-panel">
|
||||
<div class="icon-sub-sub-panel">
|
||||
<i id="battery" class="pre-blob1">-</i>
|
||||
</div>
|
||||
<p class="bi bi-battery-half small"> battery</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- <div class="icon-sub-panel">
|
||||
<button type="button" id="start" class="btn btn-light btn-lg">Start</button>
|
||||
<button type="button" id="stop" class="btn btn-light btn-lg">Stop</button>
|
||||
</div> -->
|
||||
</div>
|
||||
<div class="row" style="width: 100%; padding: 0px 10px 0px 10px;">
|
||||
<div id="wasd" class="col-md-12 row">
|
||||
<div class="col-md-6 col-sm-12" style="justify-content: center; display: flex; flex-direction: column;">
|
||||
<div class="wasd-row">
|
||||
<div class="keys" id="key-w">W</div>
|
||||
<div id="key-val"><span id="pos-vals">0,0</span><span>x,y</span></div>
|
||||
</div>
|
||||
<div class="wasd-row">
|
||||
<div class="keys" id="key-a">A</div>
|
||||
<div class="keys" id="key-s">S</div>
|
||||
<div class="keys" id="key-d">D</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-6 col-sm-12 form-group plan-form">
|
||||
<label for="plan-text">Plan (w, a, s, d, t)</label>
|
||||
<label style="font-size: 15px;" for="plan-text">*Extremely Experimental*</label>
|
||||
<textarea class="form-control" id="plan-text" rows="7" placeholder="1,0,0,0,2"></textarea>
|
||||
<button type="button" id="plan-button" class="btn btn-light btn-lg">Execute</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row" style="padding: 0px 10px 0px 10px; width: 100%;">
|
||||
<div class="panel row">
|
||||
<div class="col-sm-3" style="text-align: center;">
|
||||
<p>Play Sounds</p>
|
||||
</div>
|
||||
<div class="btn-group col-sm-8">
|
||||
<button type="button" id="sound-engage" class="btn btn-outline-success btn-lg sound">Engage</button>
|
||||
<button type="button" id="sound-disengage" class="btn btn-outline-warning btn-lg sound">Disengage</button>
|
||||
<button type="button" id="sound-error" class="btn btn-outline-danger btn-lg sound">Error</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row" style="padding: 0px 10px 0px 10px; width: 100%;">
|
||||
<div class="panel row">
|
||||
<div class="col-sm-6"><canvas id="chart-ping"></canvas></div>
|
||||
<div class="col-sm-6"><canvas id="chart-battery"></canvas></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<script src="/static/js/jsmain.js" type="module"></script>
|
||||
</body>
|
||||
</html>
|
||||
54
tools/bodyteleop/static/js/controls.js
vendored
Normal file
54
tools/bodyteleop/static/js/controls.js
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
const keyVals = {w: 0, a: 0, s: 0, d: 0}
|
||||
|
||||
export function getXY() {
|
||||
let x = -keyVals.w + keyVals.s
|
||||
let y = -keyVals.d + keyVals.a
|
||||
return {x, y}
|
||||
}
|
||||
|
||||
export const handleKeyX = (key, setValue) => {
|
||||
if (['w', 'a', 's', 'd'].includes(key)){
|
||||
keyVals[key] = setValue;
|
||||
let color = "#333";
|
||||
if (setValue === 1){
|
||||
color = "#e74c3c";
|
||||
}
|
||||
$("#key-"+key).css('background', color);
|
||||
const {x, y} = getXY();
|
||||
$("#pos-vals").text(x+","+y);
|
||||
}
|
||||
};
|
||||
|
||||
export async function executePlan() {
|
||||
let plan = $("#plan-text").val();
|
||||
const planList = [];
|
||||
plan.split("\n").forEach(function(e){
|
||||
let line = e.split(",").map(k=>parseInt(k));
|
||||
if (line.length != 5 || line.slice(0, 4).map(e=>[1, 0].includes(e)).includes(false) || line[4] < 0 || line[4] > 10){
|
||||
console.log("invalid plan");
|
||||
}
|
||||
else{
|
||||
planList.push(line)
|
||||
}
|
||||
});
|
||||
|
||||
async function execute() {
|
||||
for (var i = 0; i < planList.length; i++) {
|
||||
let [w, a, s, d, t] = planList[i];
|
||||
while(t > 0){
|
||||
console.log(w, a, s, d, t);
|
||||
if(w==1){$("#key-w").mousedown();}
|
||||
if(a==1){$("#key-a").mousedown();}
|
||||
if(s==1){$("#key-s").mousedown();}
|
||||
if(d==1){$("#key-d").mousedown();}
|
||||
await sleep(50);
|
||||
$("#key-w").mouseup();
|
||||
$("#key-a").mouseup();
|
||||
$("#key-s").mouseup();
|
||||
$("#key-d").mouseup();
|
||||
t = t - 0.05;
|
||||
}
|
||||
}
|
||||
}
|
||||
execute();
|
||||
}
|
||||
27
tools/bodyteleop/static/js/jsmain.js
Normal file
27
tools/bodyteleop/static/js/jsmain.js
Normal file
@@ -0,0 +1,27 @@
|
||||
import { handleKeyX, executePlan } from "./controls.js";
|
||||
import { start, stop, lastChannelMessageTime, playSoundRequest } from "./webrtc.js";
|
||||
|
||||
export var pc = null;
|
||||
export var dc = null;
|
||||
|
||||
document.addEventListener('keydown', (e)=>(handleKeyX(e.key.toLowerCase(), 1)));
|
||||
document.addEventListener('keyup', (e)=>(handleKeyX(e.key.toLowerCase(), 0)));
|
||||
$(".keys").bind("mousedown touchstart", (e)=>handleKeyX($(e.target).attr('id').replace('key-', ''), 1));
|
||||
$(".keys").bind("mouseup touchend", (e)=>handleKeyX($(e.target).attr('id').replace('key-', ''), 0));
|
||||
$("#plan-button").click(executePlan);
|
||||
$(".sound").click((e)=>{
|
||||
const sound = $(e.target).attr('id').replace('sound-', '')
|
||||
return playSoundRequest(sound);
|
||||
});
|
||||
|
||||
setInterval( () => {
|
||||
const dt = new Date().getTime();
|
||||
if ((dt - lastChannelMessageTime) > 1000) {
|
||||
$(".pre-blob").removeClass('blob');
|
||||
$("#battery").text("-");
|
||||
$("#ping-time").text('-');
|
||||
$("video")[0].load();
|
||||
}
|
||||
}, 5000);
|
||||
|
||||
start(pc, dc);
|
||||
53
tools/bodyteleop/static/js/plots.js
Normal file
53
tools/bodyteleop/static/js/plots.js
Normal file
@@ -0,0 +1,53 @@
|
||||
export const pingPoints = [];
|
||||
export const batteryPoints = [];
|
||||
|
||||
function getChartConfig(pts, color, title, ymax=100) {
|
||||
return {
|
||||
type: 'line',
|
||||
data: {
|
||||
datasets: [{
|
||||
label: title,
|
||||
data: pts,
|
||||
borderWidth: 1,
|
||||
borderColor: color,
|
||||
backgroundColor: color,
|
||||
fill: 'origin'
|
||||
}]
|
||||
},
|
||||
options: {
|
||||
scales: {
|
||||
x: {
|
||||
type: 'time',
|
||||
time: {
|
||||
unit: 'minute',
|
||||
displayFormats: {
|
||||
second: 'h:mm a'
|
||||
}
|
||||
},
|
||||
grid: {
|
||||
color: '#222', // Grid lines color
|
||||
},
|
||||
ticks: {
|
||||
source: 'data',
|
||||
fontColor: 'rgba(255, 255, 255, 1.0)', // Y-axis label color
|
||||
}
|
||||
},
|
||||
y: {
|
||||
beginAtZero: true,
|
||||
max: ymax,
|
||||
grid: {
|
||||
color: 'rgba(255, 255, 255, 0.1)', // Grid lines color
|
||||
},
|
||||
ticks: {
|
||||
fontColor: 'rgba(255, 255, 255, 0.7)', // Y-axis label color
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const ctxPing = document.getElementById('chart-ping');
|
||||
const ctxBattery = document.getElementById('chart-battery');
|
||||
export const chartPing = new Chart(ctxPing, getChartConfig(pingPoints, 'rgba(192, 57, 43, 0.7)', 'Controls Ping Time (ms)', 250));
|
||||
export const chartBattery = new Chart(ctxBattery, getChartConfig(batteryPoints, 'rgba(41, 128, 185, 0.7)', 'Battery %', 100));
|
||||
209
tools/bodyteleop/static/js/webrtc.js
Normal file
209
tools/bodyteleop/static/js/webrtc.js
Normal file
@@ -0,0 +1,209 @@
|
||||
import { getXY } from "./controls.js";
|
||||
import { pingPoints, batteryPoints, chartPing, chartBattery } from "./plots.js";
|
||||
|
||||
export let controlCommandInterval = null;
|
||||
export let latencyInterval = null;
|
||||
export let lastChannelMessageTime = null;
|
||||
|
||||
|
||||
export function offerRtcRequest(sdp, type) {
|
||||
return fetch('/offer', {
|
||||
body: JSON.stringify({sdp: sdp, type: type}),
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
method: 'POST'
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
export function playSoundRequest(sound) {
|
||||
return fetch('/sound', {
|
||||
body: JSON.stringify({sound}),
|
||||
headers: {'Content-Type': 'application/json'},
|
||||
method: 'POST'
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
export function pingHeadRequest() {
|
||||
return fetch('/', {
|
||||
method: 'HEAD'
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
export function createPeerConnection(pc) {
|
||||
var config = {
|
||||
sdpSemantics: 'unified-plan'
|
||||
};
|
||||
|
||||
pc = new RTCPeerConnection(config);
|
||||
|
||||
// connect audio / video
|
||||
pc.addEventListener('track', function(evt) {
|
||||
console.log("Adding Tracks!")
|
||||
if (evt.track.kind == 'video')
|
||||
document.getElementById('video').srcObject = evt.streams[0];
|
||||
else
|
||||
document.getElementById('audio').srcObject = evt.streams[0];
|
||||
});
|
||||
return pc;
|
||||
}
|
||||
|
||||
|
||||
export function negotiate(pc) {
|
||||
return pc.createOffer({offerToReceiveAudio:true, offerToReceiveVideo:true}).then(function(offer) {
|
||||
return pc.setLocalDescription(offer);
|
||||
}).then(function() {
|
||||
return new Promise(function(resolve) {
|
||||
if (pc.iceGatheringState === 'complete') {
|
||||
resolve();
|
||||
}
|
||||
else {
|
||||
function checkState() {
|
||||
if (pc.iceGatheringState === 'complete') {
|
||||
pc.removeEventListener('icegatheringstatechange', checkState);
|
||||
resolve();
|
||||
}
|
||||
}
|
||||
pc.addEventListener('icegatheringstatechange', checkState);
|
||||
}
|
||||
});
|
||||
}).then(function() {
|
||||
var offer = pc.localDescription;
|
||||
return offerRtcRequest(offer.sdp, offer.type);
|
||||
}).then(function(response) {
|
||||
console.log(response);
|
||||
return response.json();
|
||||
}).then(function(answer) {
|
||||
return pc.setRemoteDescription(answer);
|
||||
}).catch(function(e) {
|
||||
alert(e);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
function isMobile() {
|
||||
let check = false;
|
||||
(function(a){if(/(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i.test(a)||/1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-/i.test(a.substr(0,4))) check = true;})(navigator.userAgent||navigator.vendor||window.opera);
|
||||
return check;
|
||||
};
|
||||
|
||||
|
||||
export const constraints = {
|
||||
audio: {
|
||||
autoGainControl: false,
|
||||
sampleRate: 48000,
|
||||
sampleSize: 16,
|
||||
echoCancellation: true,
|
||||
noiseSuppression: true,
|
||||
channelCount: 1
|
||||
},
|
||||
video: isMobile()
|
||||
};
|
||||
|
||||
|
||||
export function start(pc, dc) {
|
||||
pc = createPeerConnection(pc);
|
||||
|
||||
// add audio track
|
||||
navigator.mediaDevices.enumerateDevices()
|
||||
.then(function(devices) {
|
||||
const hasAudioInput = devices.find((device) => device.kind === "audioinput");
|
||||
var modifiedConstraints = {};
|
||||
modifiedConstraints.video = constraints.video;
|
||||
modifiedConstraints.audio = hasAudioInput ? constraints.audio : false;
|
||||
|
||||
return Promise.resolve(modifiedConstraints);
|
||||
})
|
||||
.then(function(constraints) {
|
||||
if (constraints.audio || constraints.video) {
|
||||
return navigator.mediaDevices.getUserMedia(constraints);
|
||||
} else{
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
})
|
||||
.then(function(stream) {
|
||||
if (stream) {
|
||||
stream.getTracks().forEach(function(track) {
|
||||
pc.addTrack(track, stream);
|
||||
});
|
||||
}
|
||||
|
||||
return negotiate(pc);
|
||||
})
|
||||
.catch(function(err) {
|
||||
alert('Could not acquire media: ' + err);
|
||||
});
|
||||
|
||||
var parameters = {"ordered": true};
|
||||
dc = pc.createDataChannel('data', parameters);
|
||||
dc.onclose = function() {
|
||||
clearInterval(controlCommandInterval);
|
||||
clearInterval(latencyInterval);
|
||||
};
|
||||
|
||||
function sendJoystickOverDataChannel() {
|
||||
const {x, y} = getXY();
|
||||
var message = JSON.stringify({type: "testJoystick", data: {axes: [x, y], buttons: [false]}})
|
||||
dc.send(message);
|
||||
}
|
||||
function checkLatency() {
|
||||
const initialTime = new Date().getTime();
|
||||
pingHeadRequest().then(function() {
|
||||
const currentTime = new Date().getTime();
|
||||
if (Math.abs(currentTime - lastChannelMessageTime) < 1000) {
|
||||
const pingtime = currentTime - initialTime;
|
||||
pingPoints.push({'x': currentTime, 'y': pingtime});
|
||||
if (pingPoints.length > 1000) {
|
||||
pingPoints.shift();
|
||||
}
|
||||
chartPing.update();
|
||||
$("#ping-time").text((pingtime) + "ms");
|
||||
}
|
||||
})
|
||||
}
|
||||
dc.onopen = function() {
|
||||
controlCommandInterval = setInterval(sendJoystickOverDataChannel, 50);
|
||||
latencyInterval = setInterval(checkLatency, 1000);
|
||||
sendJoystickOverDataChannel();
|
||||
};
|
||||
|
||||
const textDecoder = new TextDecoder();
|
||||
var carStaterIndex = 0;
|
||||
dc.onmessage = function(evt) {
|
||||
const text = textDecoder.decode(evt.data);
|
||||
const msg = JSON.parse(text);
|
||||
if (carStaterIndex % 100 == 0 && msg.type === 'carState') {
|
||||
const batteryLevel = Math.round(msg.data.fuelGauge * 100);
|
||||
$("#battery").text(batteryLevel + "%");
|
||||
batteryPoints.push({'x': new Date().getTime(), 'y': batteryLevel});
|
||||
if (batteryPoints.length > 1000) {
|
||||
batteryPoints.shift();
|
||||
}
|
||||
chartBattery.update();
|
||||
}
|
||||
carStaterIndex += 1;
|
||||
lastChannelMessageTime = new Date().getTime();
|
||||
$(".pre-blob").addClass('blob');
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
export function stop(pc, dc) {
|
||||
if (dc) {
|
||||
dc.close();
|
||||
}
|
||||
if (pc.getTransceivers) {
|
||||
pc.getTransceivers().forEach(function(transceiver) {
|
||||
if (transceiver.stop) {
|
||||
transceiver.stop();
|
||||
}
|
||||
});
|
||||
}
|
||||
pc.getSenders().forEach(function(sender) {
|
||||
sender.track.stop();
|
||||
});
|
||||
setTimeout(function() {
|
||||
pc.close();
|
||||
}, 500);
|
||||
}
|
||||
185
tools/bodyteleop/static/main.css
Normal file
185
tools/bodyteleop/static/main.css
Normal file
@@ -0,0 +1,185 @@
|
||||
body {
|
||||
background: #333 !important;
|
||||
color: #fff !important;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: start;
|
||||
}
|
||||
|
||||
p {
|
||||
margin: 0px !important;
|
||||
}
|
||||
|
||||
i {
|
||||
font-style: normal;
|
||||
}
|
||||
|
||||
.small {
|
||||
font-size: 1em !important
|
||||
}
|
||||
|
||||
.jumbo {
|
||||
font-size: 8rem;
|
||||
}
|
||||
|
||||
|
||||
@media (max-width: 600px) {
|
||||
.small {
|
||||
font-size: 0.5em !important
|
||||
}
|
||||
.jumbo {
|
||||
display: none;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#main {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-content: center;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
font-size: 30px;
|
||||
width: 100%;
|
||||
max-width: 1200px;
|
||||
}
|
||||
|
||||
video {
|
||||
width: 95%;
|
||||
}
|
||||
|
||||
.pre-blob {
|
||||
display: flex;
|
||||
background: #333;
|
||||
border-radius: 50%;
|
||||
margin: 10px;
|
||||
height: 45px;
|
||||
width: 45px;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
font-size: 1rem;
|
||||
}
|
||||
|
||||
.blob {
|
||||
background: rgba(231, 76, 60,1.0);
|
||||
box-shadow: 0 0 0 0 rgba(231, 76, 60,1.0);
|
||||
animation: pulse 2s infinite;
|
||||
}
|
||||
|
||||
@keyframes pulse {
|
||||
0% {
|
||||
box-shadow: 0 0 0 0px rgba(192, 57, 43, 1);
|
||||
}
|
||||
100% {
|
||||
box-shadow: 0 0 0 20px rgba(192, 57, 43, 0);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.icon-sup-panel {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: space-around;
|
||||
align-items: center;
|
||||
background: #222;
|
||||
border-radius: 10px;
|
||||
padding: 5px;
|
||||
margin: 5px 0px 5px 0px;
|
||||
}
|
||||
|
||||
.icon-sub-panel {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
#icon-panel {
|
||||
display: flex;
|
||||
width: 100%;
|
||||
justify-content: space-between;
|
||||
margin-top: 5px;
|
||||
}
|
||||
|
||||
.icon-sub-sub-panel {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
}
|
||||
|
||||
.keys, #key-val {
|
||||
background: #333;
|
||||
padding: 2rem;
|
||||
margin: 5px;
|
||||
color: #fff;
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
border-radius: 10px;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
#key-val {
|
||||
pointer-events: none;
|
||||
background: #fff;
|
||||
color: #333;
|
||||
line-height: 1;
|
||||
font-size: 20px;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.wasd-row {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: center;
|
||||
align-items: stretch;
|
||||
}
|
||||
|
||||
#wasd {
|
||||
margin: 5px 0px 5px 0px;
|
||||
background: #222;
|
||||
border-radius: 10px;
|
||||
width: 100%;
|
||||
padding: 20px;
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: space-around;
|
||||
align-items: stretch;
|
||||
|
||||
user-select: none;
|
||||
-webkit-touch-callout: none;
|
||||
-webkit-user-select: none;
|
||||
-khtml-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
|
||||
.panel {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
margin: 5px 0px 5px 0px !important;
|
||||
background: #222;
|
||||
border-radius: 10px;
|
||||
width: 100%;
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
#ping-time, #battery {
|
||||
font-size: 25px;
|
||||
}
|
||||
|
||||
#stop {
|
||||
display: none;
|
||||
}
|
||||
|
||||
.plan-form {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.details {
|
||||
display: flex;
|
||||
padding: 0px 10px 0px 10px;
|
||||
}
|
||||
BIN
tools/bodyteleop/static/poster.png
Normal file
BIN
tools/bodyteleop/static/poster.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 8.1 KiB |
126
tools/bodyteleop/web.py
Normal file
126
tools/bodyteleop/web.py
Normal file
@@ -0,0 +1,126 @@
|
||||
import asyncio
|
||||
import dataclasses
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import ssl
|
||||
import subprocess
|
||||
|
||||
from aiohttp import web, ClientSession
|
||||
import pyaudio
|
||||
import wave
|
||||
|
||||
from openpilot.common.basedir import BASEDIR
|
||||
from openpilot.system.webrtc.webrtcd import StreamRequestBody
|
||||
from openpilot.common.params import Params
|
||||
|
||||
logger = logging.getLogger("bodyteleop")
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
TELEOPDIR = f"{BASEDIR}/tools/bodyteleop"
|
||||
WEBRTCD_HOST, WEBRTCD_PORT = "localhost", 5001
|
||||
|
||||
|
||||
## UTILS
|
||||
async def play_sound(sound):
|
||||
SOUNDS = {
|
||||
"engage": "selfdrive/assets/sounds/engage.wav",
|
||||
"disengage": "selfdrive/assets/sounds/disengage.wav",
|
||||
"error": "selfdrive/assets/sounds/warning_immediate.wav",
|
||||
}
|
||||
assert sound in SOUNDS
|
||||
|
||||
chunk = 5120
|
||||
with wave.open(os.path.join(BASEDIR, SOUNDS[sound]), "rb") as wf:
|
||||
def callback(in_data, frame_count, time_info, status):
|
||||
data = wf.readframes(frame_count)
|
||||
return data, pyaudio.paContinue
|
||||
|
||||
p = pyaudio.PyAudio()
|
||||
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
|
||||
channels=wf.getnchannels(),
|
||||
rate=wf.getframerate(),
|
||||
output=True,
|
||||
frames_per_buffer=chunk,
|
||||
stream_callback=callback)
|
||||
stream.start_stream()
|
||||
while stream.is_active():
|
||||
await asyncio.sleep(0)
|
||||
stream.stop_stream()
|
||||
stream.close()
|
||||
p.terminate()
|
||||
|
||||
## SSL
|
||||
def create_ssl_cert(cert_path, key_path):
|
||||
try:
|
||||
proc = subprocess.run(f'openssl req -x509 -newkey rsa:4096 -nodes -out {cert_path} -keyout {key_path} \
|
||||
-days 365 -subj "/C=US/ST=California/O=commaai/OU=comma body"',
|
||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
|
||||
proc.check_returncode()
|
||||
except subprocess.CalledProcessError as ex:
|
||||
raise ValueError(f"Error creating SSL certificate:\n[stdout]\n{proc.stdout.decode()}\n[stderr]\n{proc.stderr.decode()}") from ex
|
||||
|
||||
|
||||
def create_ssl_context():
|
||||
cert_path = os.path.join(TELEOPDIR, "cert.pem")
|
||||
key_path = os.path.join(TELEOPDIR, "key.pem")
|
||||
if not os.path.exists(cert_path) or not os.path.exists(key_path):
|
||||
logger.info("Creating certificate...")
|
||||
create_ssl_cert(cert_path, key_path)
|
||||
else:
|
||||
logger.info("Certificate exists!")
|
||||
ssl_context = ssl.SSLContext(protocol=ssl.PROTOCOL_TLS_SERVER)
|
||||
ssl_context.load_cert_chain(cert_path, key_path)
|
||||
|
||||
return ssl_context
|
||||
|
||||
## ENDPOINTS
|
||||
async def index(request):
|
||||
with open(os.path.join(TELEOPDIR, "static", "index.html"), "r") as f:
|
||||
content = f.read()
|
||||
return web.Response(content_type="text/html", text=content)
|
||||
|
||||
|
||||
async def ping(request):
|
||||
return web.Response(text="pong")
|
||||
|
||||
|
||||
async def sound(request):
|
||||
params = await request.json()
|
||||
sound_to_play = params["sound"]
|
||||
|
||||
await play_sound(sound_to_play)
|
||||
return web.json_response({"status": "ok"})
|
||||
|
||||
|
||||
async def offer(request):
|
||||
params = await request.json()
|
||||
body = StreamRequestBody(params["sdp"], ["driver"], ["testJoystick"], ["carState"])
|
||||
body_json = json.dumps(dataclasses.asdict(body))
|
||||
|
||||
logger.info("Sending offer to webrtcd...")
|
||||
webrtcd_url = f"http://{WEBRTCD_HOST}:{WEBRTCD_PORT}/stream"
|
||||
async with ClientSession() as session, session.post(webrtcd_url, data=body_json) as resp:
|
||||
assert resp.status == 200
|
||||
answer = await resp.json()
|
||||
return web.json_response(answer)
|
||||
|
||||
|
||||
def main():
|
||||
# Enable joystick debug mode
|
||||
Params().put_bool("JoystickDebugMode", True)
|
||||
|
||||
# App needs to be HTTPS for microphone and audio autoplay to work on the browser
|
||||
ssl_context = create_ssl_context()
|
||||
|
||||
app = web.Application()
|
||||
app.router.add_get("/", index)
|
||||
app.router.add_get("/ping", ping, allow_head=True)
|
||||
app.router.add_post("/offer", offer)
|
||||
app.router.add_post("/sound", sound)
|
||||
app.router.add_static('/static', os.path.join(TELEOPDIR, 'static'))
|
||||
web.run_app(app, access_log=None, host="0.0.0.0", port=5000, ssl_context=ssl_context)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
53
tools/joystick/README.md
Normal file
53
tools/joystick/README.md
Normal file
@@ -0,0 +1,53 @@
|
||||
# Joystick
|
||||
|
||||
**Hardware needed**: device running openpilot, laptop, joystick (optional)
|
||||
|
||||
With joystickd, you can connect your laptop to your comma device over the network and debug controls using a joystick or keyboard.
|
||||
joystickd uses [inputs](https://pypi.org/project/inputs) which supports many common gamepads and joysticks.
|
||||
|
||||
## Usage
|
||||
|
||||
The car must be off, and openpilot must be offroad before starting `joystickd`.
|
||||
|
||||
### Using a keyboard
|
||||
|
||||
SSH into your comma device and start joystickd with the following command:
|
||||
|
||||
```shell
|
||||
tools/joystick/joystickd.py --keyboard
|
||||
```
|
||||
|
||||
The available buttons and axes will print showing their key mappings. In general, the WASD keys control gas and brakes and steering torque in 5% increments.
|
||||
|
||||
### Joystick on your comma three
|
||||
|
||||
Plug the joystick into your comma three aux USB-C port. Then, SSH into the device and start `joystickd.py`.
|
||||
|
||||
### Joystick on your laptop
|
||||
|
||||
In order to use a joystick over the network, we need to run joystickd locally from your laptop and have it send `testJoystick` packets over the network to the comma device.
|
||||
|
||||
1. Connect a joystick to your PC.
|
||||
2. Connect your laptop to your comma device's hotspot and open a new SSH shell. Since joystickd is being run on your laptop, we need to write a parameter to let controlsd know to start in joystick debug mode:
|
||||
```shell
|
||||
# on your comma device
|
||||
echo -n "1" > /data/params/d/JoystickDebugMode
|
||||
```
|
||||
3. Run bridge with your laptop's IP address. This republishes the `testJoystick` packets sent from your laptop so that openpilot can receive them:
|
||||
```shell
|
||||
# on your comma device
|
||||
cereal/messaging/bridge {LAPTOP_IP} testJoystick
|
||||
```
|
||||
4. Start joystickd on your laptop in ZMQ mode.
|
||||
```shell
|
||||
# on your laptop
|
||||
export ZMQ=1
|
||||
tools/joystick/joystickd.py
|
||||
```
|
||||
|
||||
---
|
||||
Now start your car and openpilot should go into joystick mode with an alert on startup! The status of the axes will display on the alert, while button statuses print in the shell.
|
||||
|
||||
Make sure the conditions are met in the panda to allow controls (e.g. cruise control engaged). You can also make a modification to the panda code to always allow controls.
|
||||
|
||||

|
||||
116
tools/joystick/joystickd.py
Executable file
116
tools/joystick/joystickd.py
Executable file
@@ -0,0 +1,116 @@
|
||||
#!/usr/bin/env python
|
||||
import os
|
||||
import argparse
|
||||
import threading
|
||||
from inputs import get_gamepad
|
||||
|
||||
import cereal.messaging as messaging
|
||||
from openpilot.common.realtime import Ratekeeper
|
||||
from openpilot.common.numpy_fast import interp, clip
|
||||
from openpilot.common.params import Params
|
||||
from openpilot.tools.lib.kbhit import KBHit
|
||||
|
||||
|
||||
class Keyboard:
|
||||
def __init__(self):
|
||||
self.kb = KBHit()
|
||||
self.axis_increment = 0.05 # 5% of full actuation each key press
|
||||
self.axes_map = {'w': 'gb', 's': 'gb',
|
||||
'a': 'steer', 'd': 'steer'}
|
||||
self.axes_values = {'gb': 0., 'steer': 0.}
|
||||
self.axes_order = ['gb', 'steer']
|
||||
self.cancel = False
|
||||
|
||||
def update(self):
|
||||
key = self.kb.getch().lower()
|
||||
self.cancel = False
|
||||
if key == 'r':
|
||||
self.axes_values = {ax: 0. for ax in self.axes_values}
|
||||
elif key == 'c':
|
||||
self.cancel = True
|
||||
elif key in self.axes_map:
|
||||
axis = self.axes_map[key]
|
||||
incr = self.axis_increment if key in ['w', 'a'] else -self.axis_increment
|
||||
self.axes_values[axis] = clip(self.axes_values[axis] + incr, -1, 1)
|
||||
else:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class Joystick:
|
||||
def __init__(self, gamepad=False):
|
||||
# TODO: find a way to get this from API, perhaps "inputs" doesn't support it
|
||||
if gamepad:
|
||||
self.cancel_button = 'BTN_NORTH' # (BTN_NORTH=X, ABS_RZ=Right Trigger)
|
||||
accel_axis = 'ABS_Y'
|
||||
steer_axis = 'ABS_RX'
|
||||
else:
|
||||
self.cancel_button = 'BTN_TRIGGER'
|
||||
accel_axis = 'ABS_Y'
|
||||
steer_axis = 'ABS_RX'
|
||||
self.min_axis_value = {accel_axis: 0., steer_axis: 0.}
|
||||
self.max_axis_value = {accel_axis: 255., steer_axis: 255.}
|
||||
self.axes_values = {accel_axis: 0., steer_axis: 0.}
|
||||
self.axes_order = [accel_axis, steer_axis]
|
||||
self.cancel = False
|
||||
|
||||
def update(self):
|
||||
joystick_event = get_gamepad()[0]
|
||||
event = (joystick_event.code, joystick_event.state)
|
||||
if event[0] == self.cancel_button:
|
||||
if event[1] == 1:
|
||||
self.cancel = True
|
||||
elif event[1] == 0: # state 0 is falling edge
|
||||
self.cancel = False
|
||||
elif event[0] in self.axes_values:
|
||||
self.max_axis_value[event[0]] = max(event[1], self.max_axis_value[event[0]])
|
||||
self.min_axis_value[event[0]] = min(event[1], self.min_axis_value[event[0]])
|
||||
|
||||
norm = -interp(event[1], [self.min_axis_value[event[0]], self.max_axis_value[event[0]]], [-1., 1.])
|
||||
self.axes_values[event[0]] = norm if abs(norm) > 0.05 else 0. # center can be noisy, deadzone of 5%
|
||||
else:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def send_thread(joystick):
|
||||
joystick_sock = messaging.pub_sock('testJoystick')
|
||||
rk = Ratekeeper(100, print_delay_threshold=None)
|
||||
while 1:
|
||||
dat = messaging.new_message('testJoystick')
|
||||
dat.testJoystick.axes = [joystick.axes_values[a] for a in joystick.axes_order]
|
||||
dat.testJoystick.buttons = [joystick.cancel]
|
||||
joystick_sock.send(dat.to_bytes())
|
||||
print('\n' + ', '.join(f'{name}: {round(v, 3)}' for name, v in joystick.axes_values.items()))
|
||||
rk.keep_time()
|
||||
|
||||
def joystick_thread(joystick):
|
||||
Params().put_bool('JoystickDebugMode', True)
|
||||
threading.Thread(target=send_thread, args=(joystick,), daemon=True).start()
|
||||
while True:
|
||||
joystick.update()
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(description='Publishes events from your joystick to control your car.\n' +
|
||||
'openpilot must be offroad before starting joysticked.',
|
||||
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
|
||||
parser.add_argument('--keyboard', action='store_true', help='Use your keyboard instead of a joystick')
|
||||
parser.add_argument('--gamepad', action='store_true', help='Use gamepad configuration instead of joystick')
|
||||
args = parser.parse_args()
|
||||
|
||||
if not Params().get_bool("IsOffroad") and "ZMQ" not in os.environ:
|
||||
print("The car must be off before running joystickd.")
|
||||
exit()
|
||||
|
||||
print()
|
||||
if args.keyboard:
|
||||
print('Gas/brake control: `W` and `S` keys')
|
||||
print('Steering control: `A` and `D` keys')
|
||||
print('Buttons')
|
||||
print('- `R`: Resets axes')
|
||||
print('- `C`: Cancel cruise control')
|
||||
else:
|
||||
print('Using joystick, make sure to run cereal/messaging/bridge on your device if running over the network!')
|
||||
|
||||
joystick = Keyboard() if args.keyboard else Joystick(args.gamepad)
|
||||
joystick_thread(joystick)
|
||||
51
tools/lib/README.md
Normal file
51
tools/lib/README.md
Normal file
@@ -0,0 +1,51 @@
|
||||
## LogReader
|
||||
|
||||
Route is a class for conveniently accessing all the [logs](/system/loggerd/) from your routes. The LogReader class reads the non-video logs, i.e. rlog.bz2 and qlog.bz2. There's also a matching FrameReader class for reading the videos.
|
||||
|
||||
```python
|
||||
from openpilot.tools.lib.route import Route
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
|
||||
r = Route("a2a0ccea32023010|2023-07-27--13-01-19")
|
||||
|
||||
# get a list of paths for the route's rlog files
|
||||
print(r.log_paths())
|
||||
|
||||
# and road camera (fcamera.hevc) files
|
||||
print(r.camera_paths())
|
||||
|
||||
# setup a LogReader to read the route's first rlog
|
||||
lr = LogReader(r.log_paths()[0])
|
||||
|
||||
# print out all the messages in the log
|
||||
import codecs
|
||||
codecs.register_error("strict", codecs.backslashreplace_errors)
|
||||
for msg in lr:
|
||||
print(msg)
|
||||
|
||||
# setup a LogReader for the route's second qlog
|
||||
lr = LogReader(r.log_paths()[1])
|
||||
|
||||
# print all the steering angles values from the log
|
||||
for msg in lr:
|
||||
if msg.which() == "carState":
|
||||
print(msg.carState.steeringAngleDeg)
|
||||
```
|
||||
|
||||
### MultiLogIterator
|
||||
|
||||
`MultiLogIterator` is similar to `LogReader`, but reads multiple logs.
|
||||
|
||||
```python
|
||||
from openpilot.tools.lib.route import Route
|
||||
from openpilot.tools.lib.logreader import MultiLogIterator
|
||||
|
||||
# setup a MultiLogIterator to read all the logs in the route
|
||||
r = Route("a2a0ccea32023010|2023-07-27--13-01-19")
|
||||
lr = MultiLogIterator(r.log_paths())
|
||||
|
||||
# print all the steering angles values from all the logs in the route
|
||||
for msg in lr:
|
||||
if msg.which() == "carState":
|
||||
print(msg.carState.steeringAngleDeg)
|
||||
```
|
||||
0
tools/lib/__init__.py
Normal file
0
tools/lib/__init__.py
Normal file
34
tools/lib/api.py
Normal file
34
tools/lib/api.py
Normal file
@@ -0,0 +1,34 @@
|
||||
import os
|
||||
import requests
|
||||
API_HOST = os.getenv('API_HOST', 'https://api.commadotai.com')
|
||||
|
||||
class CommaApi():
|
||||
def __init__(self, token=None):
|
||||
self.session = requests.Session()
|
||||
self.session.headers['User-agent'] = 'OpenpilotTools'
|
||||
if token:
|
||||
self.session.headers['Authorization'] = 'JWT ' + token
|
||||
|
||||
def request(self, method, endpoint, **kwargs):
|
||||
resp = self.session.request(method, API_HOST + '/' + endpoint, **kwargs)
|
||||
resp_json = resp.json()
|
||||
if isinstance(resp_json, dict) and resp_json.get('error'):
|
||||
if resp.status_code in [401, 403]:
|
||||
raise UnauthorizedError('Unauthorized. Authenticate with tools/lib/auth.py')
|
||||
|
||||
e = APIError(str(resp.status_code) + ":" + resp_json.get('description', str(resp_json['error'])))
|
||||
e.status_code = resp.status_code
|
||||
raise e
|
||||
return resp_json
|
||||
|
||||
def get(self, endpoint, **kwargs):
|
||||
return self.request('GET', endpoint, **kwargs)
|
||||
|
||||
def post(self, endpoint, **kwargs):
|
||||
return self.request('POST', endpoint, **kwargs)
|
||||
|
||||
class APIError(Exception):
|
||||
pass
|
||||
|
||||
class UnauthorizedError(Exception):
|
||||
pass
|
||||
145
tools/lib/auth.py
Executable file
145
tools/lib/auth.py
Executable file
@@ -0,0 +1,145 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Usage::
|
||||
|
||||
usage: auth.py [-h] [{google,apple,github,jwt}] [jwt]
|
||||
|
||||
Login to your comma account
|
||||
|
||||
positional arguments:
|
||||
{google,apple,github,jwt}
|
||||
jwt
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
|
||||
|
||||
Examples::
|
||||
|
||||
./auth.py # Log in with google account
|
||||
./auth.py github # Log in with GitHub Account
|
||||
./auth.py jwt ey......hw # Log in with a JWT from https://jwt.comma.ai, for use in CI
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
import pprint
|
||||
import webbrowser
|
||||
from http.server import BaseHTTPRequestHandler, HTTPServer
|
||||
from typing import Any, Dict
|
||||
from urllib.parse import parse_qs, urlencode
|
||||
|
||||
from openpilot.tools.lib.api import APIError, CommaApi, UnauthorizedError
|
||||
from openpilot.tools.lib.auth_config import set_token, get_token
|
||||
|
||||
PORT = 3000
|
||||
|
||||
|
||||
class ClientRedirectServer(HTTPServer):
|
||||
query_params: Dict[str, Any] = {}
|
||||
|
||||
|
||||
class ClientRedirectHandler(BaseHTTPRequestHandler):
|
||||
def do_GET(self):
|
||||
if not self.path.startswith('/auth'):
|
||||
self.send_response(204)
|
||||
return
|
||||
|
||||
query = self.path.split('?', 1)[-1]
|
||||
query_parsed = parse_qs(query, keep_blank_values=True)
|
||||
self.server.query_params = query_parsed
|
||||
|
||||
self.send_response(200)
|
||||
self.send_header('Content-type', 'text/plain')
|
||||
self.end_headers()
|
||||
self.wfile.write(b'Return to the CLI to continue')
|
||||
|
||||
def log_message(self, *args):
|
||||
pass # this prevent http server from dumping messages to stdout
|
||||
|
||||
|
||||
def auth_redirect_link(method):
|
||||
provider_id = {
|
||||
'google': 'g',
|
||||
'apple': 'a',
|
||||
'github': 'h',
|
||||
}[method]
|
||||
|
||||
params = {
|
||||
'redirect_uri': f"https://api.comma.ai/v2/auth/{provider_id}/redirect/",
|
||||
'state': f'service,localhost:{PORT}',
|
||||
}
|
||||
|
||||
if method == 'google':
|
||||
params.update({
|
||||
'type': 'web_server',
|
||||
'client_id': '45471411055-ornt4svd2miog6dnopve7qtmh5mnu6id.apps.googleusercontent.com',
|
||||
'response_type': 'code',
|
||||
'scope': 'https://www.googleapis.com/auth/userinfo.email',
|
||||
'prompt': 'select_account',
|
||||
})
|
||||
return 'https://accounts.google.com/o/oauth2/auth?' + urlencode(params)
|
||||
elif method == 'github':
|
||||
params.update({
|
||||
'client_id': '28c4ecb54bb7272cb5a4',
|
||||
'scope': 'read:user',
|
||||
})
|
||||
return 'https://github.com/login/oauth/authorize?' + urlencode(params)
|
||||
elif method == 'apple':
|
||||
params.update({
|
||||
'client_id': 'ai.comma.login',
|
||||
'response_type': 'code',
|
||||
'response_mode': 'form_post',
|
||||
'scope': 'name email',
|
||||
})
|
||||
return 'https://appleid.apple.com/auth/authorize?' + urlencode(params)
|
||||
else:
|
||||
raise NotImplementedError(f"no redirect implemented for method {method}")
|
||||
|
||||
|
||||
def login(method):
|
||||
oauth_uri = auth_redirect_link(method)
|
||||
|
||||
web_server = ClientRedirectServer(('localhost', PORT), ClientRedirectHandler)
|
||||
print(f'To sign in, use your browser and navigate to {oauth_uri}')
|
||||
webbrowser.open(oauth_uri, new=2)
|
||||
|
||||
while True:
|
||||
web_server.handle_request()
|
||||
if 'code' in web_server.query_params:
|
||||
break
|
||||
elif 'error' in web_server.query_params:
|
||||
print('Authentication Error: "{}". Description: "{}" '.format(
|
||||
web_server.query_params['error'],
|
||||
web_server.query_params.get('error_description')), file=sys.stderr)
|
||||
break
|
||||
|
||||
try:
|
||||
auth_resp = CommaApi().post('v2/auth/', data={'code': web_server.query_params['code'], 'provider': web_server.query_params['provider']})
|
||||
set_token(auth_resp['access_token'])
|
||||
except APIError as e:
|
||||
print(f'Authentication Error: {e}', file=sys.stderr)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
parser = argparse.ArgumentParser(description='Login to your comma account')
|
||||
parser.add_argument('method', default='google', const='google', nargs='?', choices=['google', 'apple', 'github', 'jwt'])
|
||||
parser.add_argument('jwt', nargs='?')
|
||||
|
||||
args = parser.parse_args()
|
||||
if args.method == 'jwt':
|
||||
if args.jwt is None:
|
||||
print("method JWT selected, but no JWT was provided")
|
||||
exit(1)
|
||||
|
||||
set_token(args.jwt)
|
||||
else:
|
||||
login(args.method)
|
||||
|
||||
try:
|
||||
me = CommaApi(token=get_token()).get('/v1/me')
|
||||
print("Authenticated!")
|
||||
pprint.pprint(me)
|
||||
except UnauthorizedError:
|
||||
print("Got invalid JWT")
|
||||
exit(1)
|
||||
29
tools/lib/auth_config.py
Normal file
29
tools/lib/auth_config.py
Normal file
@@ -0,0 +1,29 @@
|
||||
import json
|
||||
import os
|
||||
from openpilot.system.hardware.hw import Paths
|
||||
|
||||
|
||||
class MissingAuthConfigError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def get_token():
|
||||
try:
|
||||
with open(os.path.join(Paths.config_root(), 'auth.json')) as f:
|
||||
auth = json.load(f)
|
||||
return auth['access_token']
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def set_token(token):
|
||||
os.makedirs(Paths.config_root(), exist_ok=True)
|
||||
with open(os.path.join(Paths.config_root(), 'auth.json'), 'w') as f:
|
||||
json.dump({'access_token': token}, f)
|
||||
|
||||
|
||||
def clear_token():
|
||||
try:
|
||||
os.unlink(os.path.join(Paths.config_root(), 'auth.json'))
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
63
tools/lib/bootlog.py
Normal file
63
tools/lib/bootlog.py
Normal file
@@ -0,0 +1,63 @@
|
||||
import datetime
|
||||
import functools
|
||||
import re
|
||||
from typing import List, Optional
|
||||
|
||||
from openpilot.tools.lib.auth_config import get_token
|
||||
from openpilot.tools.lib.api import CommaApi
|
||||
from openpilot.tools.lib.helpers import RE, timestamp_to_datetime
|
||||
|
||||
|
||||
@functools.total_ordering
|
||||
class Bootlog:
|
||||
def __init__(self, url: str):
|
||||
self._url = url
|
||||
|
||||
r = re.search(RE.BOOTLOG_NAME, url)
|
||||
if not r:
|
||||
raise Exception(f"Unable to parse: {url}")
|
||||
|
||||
self._dongle_id = r.group('dongle_id')
|
||||
self._timestamp = r.group('timestamp')
|
||||
|
||||
@property
|
||||
def url(self) -> str:
|
||||
return self._url
|
||||
|
||||
@property
|
||||
def dongle_id(self) -> str:
|
||||
return self._dongle_id
|
||||
|
||||
@property
|
||||
def timestamp(self) -> str:
|
||||
return self._timestamp
|
||||
|
||||
@property
|
||||
def datetime(self) -> datetime.datetime:
|
||||
return timestamp_to_datetime(self._timestamp)
|
||||
|
||||
def __str__(self):
|
||||
return f"{self._dongle_id}|{self._timestamp}"
|
||||
|
||||
def __eq__(self, b) -> bool:
|
||||
if not isinstance(b, Bootlog):
|
||||
return False
|
||||
return self.datetime == b.datetime
|
||||
|
||||
def __lt__(self, b) -> bool:
|
||||
if not isinstance(b, Bootlog):
|
||||
return False
|
||||
return self.datetime < b.datetime
|
||||
|
||||
def get_bootlog_from_id(bootlog_id: str) -> Optional[Bootlog]:
|
||||
# TODO: implement an API endpoint for this
|
||||
bl = Bootlog(bootlog_id)
|
||||
for b in get_bootlogs(bl.dongle_id):
|
||||
if b == bl:
|
||||
return b
|
||||
return None
|
||||
|
||||
def get_bootlogs(dongle_id: str) -> List[Bootlog]:
|
||||
api = CommaApi(get_token())
|
||||
r = api.get(f'v1/devices/{dongle_id}/bootlogs')
|
||||
return [Bootlog(b) for b in r]
|
||||
14
tools/lib/cache.py
Normal file
14
tools/lib/cache.py
Normal file
@@ -0,0 +1,14 @@
|
||||
import os
|
||||
import urllib.parse
|
||||
|
||||
DEFAULT_CACHE_DIR = os.getenv("CACHE_ROOT", os.path.expanduser("~/.commacache"))
|
||||
|
||||
def cache_path_for_file_path(fn, cache_dir=DEFAULT_CACHE_DIR):
|
||||
dir_ = os.path.join(cache_dir, "local")
|
||||
os.makedirs(dir_, exist_ok=True)
|
||||
fn_parsed = urllib.parse.urlparse(fn)
|
||||
if fn_parsed.scheme == '':
|
||||
cache_fn = os.path.abspath(fn).replace("/", "_")
|
||||
else:
|
||||
cache_fn = f'{fn_parsed.hostname}_{fn_parsed.path.replace("/", "_")}'
|
||||
return os.path.join(dir_, cache_fn)
|
||||
2
tools/lib/exceptions.py
Normal file
2
tools/lib/exceptions.py
Normal file
@@ -0,0 +1,2 @@
|
||||
class DataUnreadableError(Exception):
|
||||
pass
|
||||
15
tools/lib/filereader.py
Normal file
15
tools/lib/filereader.py
Normal file
@@ -0,0 +1,15 @@
|
||||
import os
|
||||
from openpilot.tools.lib.url_file import URLFile
|
||||
|
||||
DATA_ENDPOINT = os.getenv("DATA_ENDPOINT", "http://data-raw.comma.internal/")
|
||||
|
||||
def resolve_name(fn):
|
||||
if fn.startswith("cd:/"):
|
||||
return fn.replace("cd:/", DATA_ENDPOINT)
|
||||
return fn
|
||||
|
||||
def FileReader(fn, debug=False):
|
||||
fn = resolve_name(fn)
|
||||
if fn.startswith(("http://", "https://")):
|
||||
return URLFile(fn, debug=debug)
|
||||
return open(fn, "rb")
|
||||
537
tools/lib/framereader.py
Normal file
537
tools/lib/framereader.py
Normal file
@@ -0,0 +1,537 @@
|
||||
import json
|
||||
import os
|
||||
import pickle
|
||||
import struct
|
||||
import subprocess
|
||||
import threading
|
||||
from enum import IntEnum
|
||||
from functools import wraps
|
||||
|
||||
import numpy as np
|
||||
from lru import LRU
|
||||
|
||||
import _io
|
||||
from openpilot.tools.lib.cache import cache_path_for_file_path, DEFAULT_CACHE_DIR
|
||||
from openpilot.tools.lib.exceptions import DataUnreadableError
|
||||
from openpilot.tools.lib.vidindex import hevc_index
|
||||
from openpilot.common.file_helpers import atomic_write_in_dir
|
||||
|
||||
from openpilot.tools.lib.filereader import FileReader, resolve_name
|
||||
|
||||
HEVC_SLICE_B = 0
|
||||
HEVC_SLICE_P = 1
|
||||
HEVC_SLICE_I = 2
|
||||
|
||||
|
||||
class GOPReader:
|
||||
def get_gop(self, num):
|
||||
# returns (start_frame_num, num_frames, frames_to_skip, gop_data)
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class DoNothingContextManager:
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, *x):
|
||||
pass
|
||||
|
||||
|
||||
class FrameType(IntEnum):
|
||||
raw = 1
|
||||
h265_stream = 2
|
||||
|
||||
|
||||
def fingerprint_video(fn):
|
||||
with FileReader(fn) as f:
|
||||
header = f.read(4)
|
||||
if len(header) == 0:
|
||||
raise DataUnreadableError(f"{fn} is empty")
|
||||
elif header == b"\x00\xc0\x12\x00":
|
||||
return FrameType.raw
|
||||
elif header == b"\x00\x00\x00\x01":
|
||||
if 'hevc' in fn:
|
||||
return FrameType.h265_stream
|
||||
else:
|
||||
raise NotImplementedError(fn)
|
||||
else:
|
||||
raise NotImplementedError(fn)
|
||||
|
||||
|
||||
def ffprobe(fn, fmt=None):
|
||||
fn = resolve_name(fn)
|
||||
cmd = ["ffprobe", "-v", "quiet", "-print_format", "json", "-show_format", "-show_streams"]
|
||||
if fmt:
|
||||
cmd += ["-f", fmt]
|
||||
cmd += ["-i", "-"]
|
||||
|
||||
try:
|
||||
with FileReader(fn) as f:
|
||||
ffprobe_output = subprocess.check_output(cmd, input=f.read(4096))
|
||||
except subprocess.CalledProcessError as e:
|
||||
raise DataUnreadableError(fn) from e
|
||||
|
||||
return json.loads(ffprobe_output)
|
||||
|
||||
|
||||
def cache_fn(func):
|
||||
@wraps(func)
|
||||
def cache_inner(fn, *args, **kwargs):
|
||||
if kwargs.pop('no_cache', None):
|
||||
cache_path = None
|
||||
else:
|
||||
cache_dir = kwargs.pop('cache_dir', DEFAULT_CACHE_DIR)
|
||||
cache_path = cache_path_for_file_path(fn, cache_dir)
|
||||
|
||||
if cache_path and os.path.exists(cache_path):
|
||||
with open(cache_path, "rb") as cache_file:
|
||||
cache_value = pickle.load(cache_file)
|
||||
else:
|
||||
cache_value = func(fn, *args, **kwargs)
|
||||
if cache_path:
|
||||
with atomic_write_in_dir(cache_path, mode="wb", overwrite=True) as cache_file:
|
||||
pickle.dump(cache_value, cache_file, -1)
|
||||
|
||||
return cache_value
|
||||
|
||||
return cache_inner
|
||||
|
||||
|
||||
@cache_fn
|
||||
def index_stream(fn, ft):
|
||||
if ft != FrameType.h265_stream:
|
||||
raise NotImplementedError("Only h265 supported")
|
||||
|
||||
frame_types, dat_len, prefix = hevc_index(fn)
|
||||
index = np.array(frame_types + [(0xFFFFFFFF, dat_len)], dtype=np.uint32)
|
||||
probe = ffprobe(fn, "hevc")
|
||||
|
||||
return {
|
||||
'index': index,
|
||||
'global_prefix': prefix,
|
||||
'probe': probe
|
||||
}
|
||||
|
||||
|
||||
def get_video_index(fn, frame_type, cache_dir=DEFAULT_CACHE_DIR):
|
||||
return index_stream(fn, frame_type, cache_dir=cache_dir)
|
||||
|
||||
def read_file_check_size(f, sz, cookie):
|
||||
buff = bytearray(sz)
|
||||
bytes_read = f.readinto(buff)
|
||||
assert bytes_read == sz, (bytes_read, sz)
|
||||
return buff
|
||||
|
||||
|
||||
def rgb24toyuv(rgb):
|
||||
yuv_from_rgb = np.array([[ 0.299 , 0.587 , 0.114 ],
|
||||
[-0.14714119, -0.28886916, 0.43601035 ],
|
||||
[ 0.61497538, -0.51496512, -0.10001026 ]])
|
||||
img = np.dot(rgb.reshape(-1, 3), yuv_from_rgb.T).reshape(rgb.shape)
|
||||
|
||||
|
||||
|
||||
ys = img[:, :, 0]
|
||||
us = (img[::2, ::2, 1] + img[1::2, ::2, 1] + img[::2, 1::2, 1] + img[1::2, 1::2, 1]) / 4 + 128
|
||||
vs = (img[::2, ::2, 2] + img[1::2, ::2, 2] + img[::2, 1::2, 2] + img[1::2, 1::2, 2]) / 4 + 128
|
||||
|
||||
return ys, us, vs
|
||||
|
||||
|
||||
def rgb24toyuv420(rgb):
|
||||
ys, us, vs = rgb24toyuv(rgb)
|
||||
|
||||
y_len = rgb.shape[0] * rgb.shape[1]
|
||||
uv_len = y_len // 4
|
||||
|
||||
yuv420 = np.empty(y_len + 2 * uv_len, dtype=rgb.dtype)
|
||||
yuv420[:y_len] = ys.reshape(-1)
|
||||
yuv420[y_len:y_len + uv_len] = us.reshape(-1)
|
||||
yuv420[y_len + uv_len:y_len + 2 * uv_len] = vs.reshape(-1)
|
||||
|
||||
return yuv420.clip(0, 255).astype('uint8')
|
||||
|
||||
|
||||
def rgb24tonv12(rgb):
|
||||
ys, us, vs = rgb24toyuv(rgb)
|
||||
|
||||
y_len = rgb.shape[0] * rgb.shape[1]
|
||||
uv_len = y_len // 4
|
||||
|
||||
nv12 = np.empty(y_len + 2 * uv_len, dtype=rgb.dtype)
|
||||
nv12[:y_len] = ys.reshape(-1)
|
||||
nv12[y_len::2] = us.reshape(-1)
|
||||
nv12[y_len+1::2] = vs.reshape(-1)
|
||||
|
||||
return nv12.clip(0, 255).astype('uint8')
|
||||
|
||||
|
||||
def decompress_video_data(rawdat, vid_fmt, w, h, pix_fmt):
|
||||
threads = os.getenv("FFMPEG_THREADS", "0")
|
||||
cuda = os.getenv("FFMPEG_CUDA", "0") == "1"
|
||||
args = ["ffmpeg", "-v", "quiet",
|
||||
"-threads", threads,
|
||||
"-hwaccel", "none" if not cuda else "cuda",
|
||||
"-c:v", "hevc",
|
||||
"-vsync", "0",
|
||||
"-f", vid_fmt,
|
||||
"-flags2", "showall",
|
||||
"-i", "-",
|
||||
"-threads", threads,
|
||||
"-f", "rawvideo",
|
||||
"-pix_fmt", pix_fmt,
|
||||
"-"]
|
||||
dat = subprocess.check_output(args, input=rawdat)
|
||||
|
||||
if pix_fmt == "rgb24":
|
||||
ret = np.frombuffer(dat, dtype=np.uint8).reshape(-1, h, w, 3)
|
||||
elif pix_fmt == "nv12":
|
||||
ret = np.frombuffer(dat, dtype=np.uint8).reshape(-1, (h*w*3//2))
|
||||
elif pix_fmt == "yuv420p":
|
||||
ret = np.frombuffer(dat, dtype=np.uint8).reshape(-1, (h*w*3//2))
|
||||
elif pix_fmt == "yuv444p":
|
||||
ret = np.frombuffer(dat, dtype=np.uint8).reshape(-1, 3, h, w)
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
class BaseFrameReader:
|
||||
# properties: frame_type, frame_count, w, h
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, *args):
|
||||
self.close()
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
def get(self, num, count=1, pix_fmt="yuv420p"):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
def FrameReader(fn, cache_dir=DEFAULT_CACHE_DIR, readahead=False, readbehind=False, index_data=None):
|
||||
frame_type = fingerprint_video(fn)
|
||||
if frame_type == FrameType.raw:
|
||||
return RawFrameReader(fn)
|
||||
elif frame_type in (FrameType.h265_stream,):
|
||||
if not index_data:
|
||||
index_data = get_video_index(fn, frame_type, cache_dir)
|
||||
return StreamFrameReader(fn, frame_type, index_data, readahead=readahead, readbehind=readbehind)
|
||||
else:
|
||||
raise NotImplementedError(frame_type)
|
||||
|
||||
|
||||
class RawData:
|
||||
def __init__(self, f):
|
||||
self.f = _io.FileIO(f, 'rb')
|
||||
self.lenn = struct.unpack("I", self.f.read(4))[0]
|
||||
self.count = os.path.getsize(f) / (self.lenn+4)
|
||||
|
||||
def read(self, i):
|
||||
self.f.seek((self.lenn+4)*i + 4)
|
||||
return self.f.read(self.lenn)
|
||||
|
||||
|
||||
class RawFrameReader(BaseFrameReader):
|
||||
def __init__(self, fn):
|
||||
# raw camera
|
||||
self.fn = fn
|
||||
self.frame_type = FrameType.raw
|
||||
self.rawfile = RawData(self.fn)
|
||||
self.frame_count = self.rawfile.count
|
||||
self.w, self.h = 640, 480
|
||||
|
||||
def load_and_debayer(self, img):
|
||||
img = np.frombuffer(img, dtype='uint8').reshape(960, 1280)
|
||||
cimg = np.dstack([img[0::2, 1::2], ((img[0::2, 0::2].astype("uint16") + img[1::2, 1::2].astype("uint16")) >> 1).astype("uint8"), img[1::2, 0::2]])
|
||||
return cimg
|
||||
|
||||
def get(self, num, count=1, pix_fmt="yuv420p"):
|
||||
assert self.frame_count is not None
|
||||
assert num+count <= self.frame_count
|
||||
|
||||
if pix_fmt not in ("nv12", "yuv420p", "rgb24"):
|
||||
raise ValueError(f"Unsupported pixel format {pix_fmt!r}")
|
||||
|
||||
app = []
|
||||
for i in range(num, num+count):
|
||||
dat = self.rawfile.read(i)
|
||||
rgb_dat = self.load_and_debayer(dat)
|
||||
if pix_fmt == "rgb24":
|
||||
app.append(rgb_dat)
|
||||
elif pix_fmt == "nv12":
|
||||
app.append(rgb24tonv12(rgb_dat))
|
||||
elif pix_fmt == "yuv420p":
|
||||
app.append(rgb24toyuv420(rgb_dat))
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
return app
|
||||
|
||||
|
||||
class VideoStreamDecompressor:
|
||||
def __init__(self, fn, vid_fmt, w, h, pix_fmt):
|
||||
self.fn = fn
|
||||
self.vid_fmt = vid_fmt
|
||||
self.w = w
|
||||
self.h = h
|
||||
self.pix_fmt = pix_fmt
|
||||
|
||||
if pix_fmt in ("nv12", "yuv420p"):
|
||||
self.out_size = w*h*3//2 # yuv420p
|
||||
elif pix_fmt in ("rgb24", "yuv444p"):
|
||||
self.out_size = w*h*3
|
||||
else:
|
||||
raise NotImplementedError
|
||||
|
||||
self.proc = None
|
||||
self.t = threading.Thread(target=self.write_thread)
|
||||
self.t.daemon = True
|
||||
|
||||
def write_thread(self):
|
||||
try:
|
||||
with FileReader(self.fn) as f:
|
||||
while True:
|
||||
r = f.read(1024*1024)
|
||||
if len(r) == 0:
|
||||
break
|
||||
self.proc.stdin.write(r)
|
||||
except BrokenPipeError:
|
||||
pass
|
||||
finally:
|
||||
self.proc.stdin.close()
|
||||
|
||||
def read(self):
|
||||
threads = os.getenv("FFMPEG_THREADS", "0")
|
||||
cuda = os.getenv("FFMPEG_CUDA", "0") == "1"
|
||||
cmd = [
|
||||
"ffmpeg",
|
||||
"-threads", threads,
|
||||
"-hwaccel", "none" if not cuda else "cuda",
|
||||
"-c:v", "hevc",
|
||||
# "-avioflags", "direct",
|
||||
"-analyzeduration", "0",
|
||||
"-probesize", "32",
|
||||
"-flush_packets", "0",
|
||||
# "-fflags", "nobuffer",
|
||||
"-vsync", "0",
|
||||
"-f", self.vid_fmt,
|
||||
"-i", "pipe:0",
|
||||
"-threads", threads,
|
||||
"-f", "rawvideo",
|
||||
"-pix_fmt", self.pix_fmt,
|
||||
"pipe:1"
|
||||
]
|
||||
self.proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
|
||||
try:
|
||||
self.t.start()
|
||||
|
||||
while True:
|
||||
dat = self.proc.stdout.read(self.out_size)
|
||||
if len(dat) == 0:
|
||||
break
|
||||
assert len(dat) == self.out_size
|
||||
if self.pix_fmt == "rgb24":
|
||||
ret = np.frombuffer(dat, dtype=np.uint8).reshape((self.h, self.w, 3))
|
||||
elif self.pix_fmt == "yuv420p":
|
||||
ret = np.frombuffer(dat, dtype=np.uint8)
|
||||
elif self.pix_fmt == "nv12":
|
||||
ret = np.frombuffer(dat, dtype=np.uint8)
|
||||
elif self.pix_fmt == "yuv444p":
|
||||
ret = np.frombuffer(dat, dtype=np.uint8).reshape((3, self.h, self.w))
|
||||
else:
|
||||
raise RuntimeError(f"unknown pix_fmt: {self.pix_fmt}")
|
||||
yield ret
|
||||
|
||||
result_code = self.proc.wait()
|
||||
assert result_code == 0, result_code
|
||||
finally:
|
||||
self.proc.kill()
|
||||
self.t.join()
|
||||
|
||||
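# A quick size check for the read() loop above, using the 1164x874 test video from
# tools/lib/tests/test_readers.py (illustrative numbers, not part of the decoder):
#   yuv420p / nv12:  1164 * 874 * 3 // 2 = 1,526,004 bytes per frame
#   rgb24 / yuv444p: 1164 * 874 * 3      = 3,052,008 bytes per frame
# read() therefore slices ffmpeg's stdout into fixed out_size chunks, one per frame,
# and stops when the pipe reaches EOF.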
class StreamGOPReader(GOPReader):
|
||||
def __init__(self, fn, frame_type, index_data):
|
||||
assert frame_type == FrameType.h265_stream
|
||||
|
||||
self.fn = fn
|
||||
|
||||
self.frame_type = frame_type
|
||||
self.frame_count = None
|
||||
self.w, self.h = None, None
|
||||
|
||||
self.prefix = None
|
||||
self.index = None
|
||||
|
||||
self.index = index_data['index']
|
||||
self.prefix = index_data['global_prefix']
|
||||
probe = index_data['probe']
|
||||
|
||||
self.prefix_frame_data = None
|
||||
self.num_prefix_frames = 0
|
||||
self.vid_fmt = "hevc"
|
||||
|
||||
i = 0
|
||||
while i < self.index.shape[0] and self.index[i, 0] != HEVC_SLICE_I:
|
||||
i += 1
|
||||
self.first_iframe = i
|
||||
|
||||
assert self.first_iframe == 0
|
||||
|
||||
self.frame_count = len(self.index) - 1
|
||||
|
||||
self.w = probe['streams'][0]['width']
|
||||
self.h = probe['streams'][0]['height']
|
||||
|
||||
def _lookup_gop(self, num):
|
||||
frame_b = num
|
||||
while frame_b > 0 and self.index[frame_b, 0] != HEVC_SLICE_I:
|
||||
frame_b -= 1
|
||||
|
||||
frame_e = num + 1
|
||||
while frame_e < (len(self.index) - 1) and self.index[frame_e, 0] != HEVC_SLICE_I:
|
||||
frame_e += 1
|
||||
|
||||
offset_b = self.index[frame_b, 1]
|
||||
offset_e = self.index[frame_e, 1]
|
||||
|
||||
return (frame_b, frame_e, offset_b, offset_e)
|
||||
|
||||
def get_gop(self, num):
|
||||
frame_b, frame_e, offset_b, offset_e = self._lookup_gop(num)
|
||||
assert frame_b <= num < frame_e
|
||||
|
||||
num_frames = frame_e - frame_b
|
||||
|
||||
with FileReader(self.fn) as f:
|
||||
f.seek(offset_b)
|
||||
rawdat = f.read(offset_e - offset_b)
|
||||
|
||||
if num < self.first_iframe:
|
||||
assert self.prefix_frame_data
|
||||
rawdat = self.prefix_frame_data + rawdat
|
||||
|
||||
rawdat = self.prefix + rawdat
|
||||
|
||||
skip_frames = 0
|
||||
if num < self.first_iframe:
|
||||
skip_frames = self.num_prefix_frames
|
||||
|
||||
return frame_b, num_frames, skip_frames, rawdat
|
||||
|
||||
|
||||
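# Illustration of the GOP lookup above (example data, not a real index): with slice
# types laid out as
#   frame:      0  1  2  3  4  5  6    (+ one trailing sentinel row in the index)
#   slice type: I  P  P  P  I  P  P
# _lookup_gop(2) walks back to frame 0 (the previous HEVC_SLICE_I) and forward to
# frame 4 (the next one), so get_gop(2) returns the raw bytes for frames 0..3 and
# GOPFrameReader._get_one() below decodes and caches all four before returning frame 2.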
class GOPFrameReader(BaseFrameReader):
|
||||
#FrameReader with caching and readahead for formats that are group-of-picture based
|
||||
|
||||
def __init__(self, readahead=False, readbehind=False):
|
||||
self.open_ = True
|
||||
|
||||
self.readahead = readahead
|
||||
self.readbehind = readbehind
|
||||
self.frame_cache = LRU(64)
|
||||
|
||||
if self.readahead:
|
||||
self.cache_lock = threading.RLock()
|
||||
self.readahead_last = None
|
||||
self.readahead_len = 30
|
||||
self.readahead_c = threading.Condition()
|
||||
self.readahead_thread = threading.Thread(target=self._readahead_thread)
|
||||
self.readahead_thread.daemon = True
|
||||
self.readahead_thread.start()
|
||||
else:
|
||||
self.cache_lock = DoNothingContextManager()
|
||||
|
||||
def close(self):
|
||||
if not self.open_:
|
||||
return
|
||||
self.open_ = False
|
||||
|
||||
if self.readahead:
|
||||
self.readahead_c.acquire()
|
||||
self.readahead_c.notify()
|
||||
self.readahead_c.release()
|
||||
self.readahead_thread.join()
|
||||
|
||||
def _readahead_thread(self):
|
||||
while True:
|
||||
self.readahead_c.acquire()
|
||||
try:
|
||||
if not self.open_:
|
||||
break
|
||||
self.readahead_c.wait()
|
||||
finally:
|
||||
self.readahead_c.release()
|
||||
if not self.open_:
|
||||
break
|
||||
assert self.readahead_last
|
||||
num, pix_fmt = self.readahead_last
|
||||
|
||||
if self.readbehind:
|
||||
for k in range(num - 1, max(0, num - self.readahead_len), -1):
|
||||
self._get_one(k, pix_fmt)
|
||||
else:
|
||||
for k in range(num, min(self.frame_count, num + self.readahead_len)):
|
||||
self._get_one(k, pix_fmt)
|
||||
|
||||
def _get_one(self, num, pix_fmt):
|
||||
assert num < self.frame_count
|
||||
|
||||
if (num, pix_fmt) in self.frame_cache:
|
||||
return self.frame_cache[(num, pix_fmt)]
|
||||
|
||||
with self.cache_lock:
|
||||
if (num, pix_fmt) in self.frame_cache:
|
||||
return self.frame_cache[(num, pix_fmt)]
|
||||
|
||||
frame_b, num_frames, skip_frames, rawdat = self.get_gop(num)
|
||||
|
||||
ret = decompress_video_data(rawdat, self.vid_fmt, self.w, self.h, pix_fmt)
|
||||
ret = ret[skip_frames:]
|
||||
assert ret.shape[0] == num_frames
|
||||
|
||||
for i in range(ret.shape[0]):
|
||||
self.frame_cache[(frame_b+i, pix_fmt)] = ret[i]
|
||||
|
||||
return self.frame_cache[(num, pix_fmt)]
|
||||
|
||||
def get(self, num, count=1, pix_fmt="yuv420p"):
|
||||
assert self.frame_count is not None
|
||||
|
||||
if num + count > self.frame_count:
|
||||
raise ValueError(f"{num + count} > {self.frame_count}")
|
||||
|
||||
if pix_fmt not in ("nv12", "yuv420p", "rgb24", "yuv444p"):
|
||||
raise ValueError(f"Unsupported pixel format {pix_fmt!r}")
|
||||
|
||||
ret = [self._get_one(num + i, pix_fmt) for i in range(count)]
|
||||
|
||||
if self.readahead:
|
||||
self.readahead_last = (num+count, pix_fmt)
|
||||
self.readahead_c.acquire()
|
||||
self.readahead_c.notify()
|
||||
self.readahead_c.release()
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
class StreamFrameReader(StreamGOPReader, GOPFrameReader):
|
||||
def __init__(self, fn, frame_type, index_data, readahead=False, readbehind=False):
|
||||
StreamGOPReader.__init__(self, fn, frame_type, index_data)
|
||||
GOPFrameReader.__init__(self, readahead, readbehind)
|
||||
|
||||
|
||||
def GOPFrameIterator(gop_reader, pix_fmt):
|
||||
dec = VideoStreamDecompressor(gop_reader.fn, gop_reader.vid_fmt, gop_reader.w, gop_reader.h, pix_fmt)
|
||||
yield from dec.read()
|
||||
|
||||
|
||||
def FrameIterator(fn, pix_fmt, **kwargs):
|
||||
fr = FrameReader(fn, **kwargs)
|
||||
if isinstance(fr, GOPReader):
|
||||
yield from GOPFrameIterator(fr, pix_fmt)
|
||||
else:
|
||||
for i in range(fr.frame_count):
|
||||
yield fr.get(i, pix_fmt=pix_fmt)[0]
|
||||
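A minimal usage sketch for the readers above; "fcamera.hevc" is a placeholder for a segment's forward camera file that has already been downloaded locally:

from openpilot.tools.lib.framereader import FrameReader, FrameIterator

fr = FrameReader("fcamera.hevc")
print(fr.frame_count, fr.w, fr.h)
frame = fr.get(0, count=1, pix_fmt="rgb24")[0]  # get() returns a list of decoded frames

# or stream every frame sequentially without random access
for yuv in FrameIterator("fcamera.hevc", "yuv420p"):
  pass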
35
tools/lib/helpers.py
Normal file
@@ -0,0 +1,35 @@
import bz2
import datetime

TIME_FMT = "%Y-%m-%d--%H-%M-%S"

# regex patterns
class RE:
  DONGLE_ID = r'(?P<dongle_id>[a-z0-9]{16})'
  TIMESTAMP = r'(?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}--[0-9]{2}-[0-9]{2}-[0-9]{2})'
  ROUTE_NAME = r'(?P<route_name>{}[|_/]{})'.format(DONGLE_ID, TIMESTAMP)
  SEGMENT_NAME = r'{}(?:--|/)(?P<segment_num>[0-9]+)'.format(ROUTE_NAME)
  INDEX = r'-?[0-9]+'
  SLICE = r'(?P<start>{})?:?(?P<end>{})?:?(?P<step>{})?'.format(INDEX, INDEX, INDEX)
  SEGMENT_RANGE = r'{}(?:--|/)?(?P<slice>({}))?/?(?P<selector>([qr]))?'.format(ROUTE_NAME, SLICE)
  BOOTLOG_NAME = ROUTE_NAME

  EXPLORER_FILE = r'^(?P<segment_name>{})--(?P<file_name>[a-z]+\.[a-z0-9]+)$'.format(SEGMENT_NAME)
  OP_SEGMENT_DIR = r'^(?P<segment_name>{})$'.format(SEGMENT_NAME)


def timestamp_to_datetime(t: str) -> datetime.datetime:
  """
  Convert an openpilot route timestamp to a python datetime
  """
  return datetime.datetime.strptime(t, TIME_FMT)


def save_log(dest, log_msgs, compress=True):
  dat = b"".join(msg.as_builder().to_bytes() for msg in log_msgs)

  if compress:
    dat = bz2.compress(dat)

  with open(dest, "wb") as f:
    f.write(dat)
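A small sketch of how these patterns compose; the segment name is the one used in tools/lib/tests/test_route_library.py:

import re
from openpilot.tools.lib.helpers import RE, timestamp_to_datetime

m = re.fullmatch(RE.SEGMENT_NAME, "a2a0ccea32023010|2023-07-27--13-01-19--2")
assert m is not None
print(m.group("dongle_id"), m.group("segment_num"))  # a2a0ccea32023010 2
print(timestamp_to_datetime(m.group("timestamp")))   # 2023-07-27 13:01:19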
81
tools/lib/kbhit.py
Executable file
@@ -0,0 +1,81 @@
|
||||
#!/usr/bin/env python
|
||||
import sys
|
||||
import termios
|
||||
import atexit
|
||||
from select import select
|
||||
|
||||
STDIN_FD = sys.stdin.fileno()
|
||||
|
||||
class KBHit:
|
||||
def __init__(self) -> None:
|
||||
''' Creates a KBHit object that you can call to do various keyboard things.
|
||||
'''
|
||||
|
||||
self.set_kbhit_terminal()
|
||||
|
||||
def set_kbhit_terminal(self) -> None:
|
||||
''' Save old terminal settings for closure, remove ICANON & ECHO flags.
|
||||
'''
|
||||
|
||||
# Save the terminal settings
|
||||
self.old_term = termios.tcgetattr(STDIN_FD)
|
||||
self.new_term = self.old_term.copy()
|
||||
|
||||
# New terminal setting unbuffered
|
||||
self.new_term[3] &= ~(termios.ICANON | termios.ECHO)
|
||||
termios.tcsetattr(STDIN_FD, termios.TCSAFLUSH, self.new_term)
|
||||
|
||||
# Support normal-terminal reset at exit
|
||||
atexit.register(self.set_normal_term)
|
||||
|
||||
def set_normal_term(self) -> None:
|
||||
''' Resets to normal terminal. On Windows this is a no-op.
|
||||
'''
|
||||
|
||||
termios.tcsetattr(STDIN_FD, termios.TCSAFLUSH, self.old_term)
|
||||
|
||||
@staticmethod
|
||||
def getch() -> str:
|
||||
''' Returns a keyboard character after kbhit() has been called.
|
||||
Should not be called in the same program as getarrow().
|
||||
'''
|
||||
return sys.stdin.read(1)
|
||||
|
||||
@staticmethod
|
||||
def getarrow() -> int:
|
||||
''' Returns an arrow-key code after kbhit() has been called. Codes are
|
||||
0 : up
|
||||
1 : right
|
||||
2 : down
|
||||
3 : left
|
||||
Should not be called in the same program as getch().
|
||||
'''
|
||||
|
||||
c = sys.stdin.read(3)[2]
|
||||
vals = [65, 67, 66, 68]
|
||||
|
||||
return vals.index(ord(c))
|
||||
|
||||
@staticmethod
|
||||
def kbhit():
|
||||
''' Returns True if keyboard character was hit, False otherwise.
|
||||
'''
|
||||
return select([sys.stdin], [], [], 0)[0] != []
|
||||
|
||||
|
||||
# Test
|
||||
if __name__ == "__main__":
|
||||
|
||||
kb = KBHit()
|
||||
|
||||
print('Hit any key, or ESC to exit')
|
||||
|
||||
while True:
|
||||
|
||||
if kb.kbhit():
|
||||
c = kb.getch()
|
||||
if c == '\x1b': # ESC
|
||||
break
|
||||
print(c)
|
||||
|
||||
kb.set_normal_term()
|
||||
141
tools/lib/logreader.py
Executable file
@@ -0,0 +1,141 @@
|
||||
#!/usr/bin/env python3
|
||||
import os
|
||||
import sys
|
||||
import bz2
|
||||
import urllib.parse
|
||||
import capnp
|
||||
import warnings
|
||||
|
||||
from typing import Iterable, Iterator
|
||||
|
||||
from cereal import log as capnp_log
|
||||
from openpilot.tools.lib.filereader import FileReader
|
||||
from openpilot.tools.lib.route import Route, SegmentName
|
||||
|
||||
LogIterable = Iterable[capnp._DynamicStructReader]
|
||||
|
||||
# this is an iterator itself, and uses private variables from LogReader
|
||||
class MultiLogIterator:
|
||||
def __init__(self, log_paths, sort_by_time=False):
|
||||
self._log_paths = log_paths
|
||||
self.sort_by_time = sort_by_time
|
||||
|
||||
self._first_log_idx = next(i for i in range(len(log_paths)) if log_paths[i] is not None)
|
||||
self._current_log = self._first_log_idx
|
||||
self._idx = 0
|
||||
self._log_readers = [None]*len(log_paths)
|
||||
self.start_time = self._log_reader(self._first_log_idx)._ts[0]
|
||||
|
||||
def _log_reader(self, i):
|
||||
if self._log_readers[i] is None and self._log_paths[i] is not None:
|
||||
log_path = self._log_paths[i]
|
||||
self._log_readers[i] = LogReader(log_path, sort_by_time=self.sort_by_time)
|
||||
|
||||
return self._log_readers[i]
|
||||
|
||||
def __iter__(self) -> Iterator[capnp._DynamicStructReader]:
|
||||
return self
|
||||
|
||||
def _inc(self):
|
||||
lr = self._log_reader(self._current_log)
|
||||
if self._idx < len(lr._ents)-1:
|
||||
self._idx += 1
|
||||
else:
|
||||
self._idx = 0
|
||||
self._current_log = next(i for i in range(self._current_log + 1, len(self._log_readers) + 1)
|
||||
if i == len(self._log_readers) or self._log_paths[i] is not None)
|
||||
if self._current_log == len(self._log_readers):
|
||||
raise StopIteration
|
||||
|
||||
def __next__(self):
|
||||
while 1:
|
||||
lr = self._log_reader(self._current_log)
|
||||
ret = lr._ents[self._idx]
|
||||
self._inc()
|
||||
return ret
|
||||
|
||||
def tell(self):
|
||||
# returns seconds from start of log
|
||||
return (self._log_reader(self._current_log)._ts[self._idx] - self.start_time) * 1e-9
|
||||
|
||||
def seek(self, ts):
|
||||
# seek to nearest minute
|
||||
minute = int(ts/60)
|
||||
if minute >= len(self._log_paths) or self._log_paths[minute] is None:
|
||||
return False
|
||||
|
||||
self._current_log = minute
|
||||
|
||||
# HACK: O(n) seek afterward
|
||||
self._idx = 0
|
||||
while self.tell() < ts:
|
||||
self._inc()
|
||||
return True
|
||||
|
||||
def reset(self):
|
||||
self.__init__(self._log_paths, sort_by_time=self.sort_by_time)
|
||||
|
||||
|
||||
class LogReader:
|
||||
def __init__(self, fn, canonicalize=True, only_union_types=False, sort_by_time=False, dat=None):
|
||||
self.data_version = None
|
||||
self._only_union_types = only_union_types
|
||||
|
||||
ext = None
|
||||
if not dat:
|
||||
_, ext = os.path.splitext(urllib.parse.urlparse(fn).path)
|
||||
if ext not in ('', '.bz2'):
|
||||
# old rlogs weren't bz2 compressed
|
||||
raise Exception(f"unknown extension {ext}")
|
||||
|
||||
with FileReader(fn) as f:
|
||||
dat = f.read()
|
||||
|
||||
if ext == ".bz2" or dat.startswith(b'BZh9'):
|
||||
dat = bz2.decompress(dat)
|
||||
|
||||
ents = capnp_log.Event.read_multiple_bytes(dat)
|
||||
|
||||
_ents = []
|
||||
try:
|
||||
for e in ents:
|
||||
_ents.append(e)
|
||||
except capnp.KjException:
|
||||
warnings.warn("Corrupted events detected", RuntimeWarning, stacklevel=1)
|
||||
|
||||
self._ents = list(sorted(_ents, key=lambda x: x.logMonoTime) if sort_by_time else _ents)
|
||||
self._ts = [x.logMonoTime for x in self._ents]
|
||||
|
||||
@classmethod
|
||||
def from_bytes(cls, dat):
|
||||
return cls("", dat=dat)
|
||||
|
||||
def __iter__(self) -> Iterator[capnp._DynamicStructReader]:
|
||||
for ent in self._ents:
|
||||
if self._only_union_types:
|
||||
try:
|
||||
ent.which()
|
||||
yield ent
|
||||
except capnp.lib.capnp.KjException:
|
||||
pass
|
||||
else:
|
||||
yield ent
|
||||
|
||||
def logreader_from_route_or_segment(r, sort_by_time=False):
|
||||
sn = SegmentName(r, allow_route_name=True)
|
||||
route = Route(sn.route_name.canonical_name)
|
||||
if sn.segment_num < 0:
|
||||
return MultiLogIterator(route.log_paths(), sort_by_time=sort_by_time)
|
||||
else:
|
||||
return LogReader(route.log_paths()[sn.segment_num], sort_by_time=sort_by_time)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import codecs
|
||||
# capnproto <= 0.8.0 throws errors converting byte data to string
|
||||
# below line catches those errors and replaces the bytes with \x__
|
||||
codecs.register_error("strict", codecs.backslashreplace_errors)
|
||||
log_path = sys.argv[1]
|
||||
lr = LogReader(log_path, sort_by_time=True)
|
||||
for msg in lr:
|
||||
print(msg)
|
||||
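A minimal sketch of typical LogReader use; "rlog.bz2" is a placeholder for a log file downloaded to the working directory:

from collections import Counter
from openpilot.tools.lib.logreader import LogReader

lr = LogReader("rlog.bz2", sort_by_time=True)
counts = Counter(msg.which() for msg in lr)
print(counts.most_common(5))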
257
tools/lib/route.py
Normal file
@@ -0,0 +1,257 @@
|
||||
import os
|
||||
import re
|
||||
from urllib.parse import urlparse
|
||||
from collections import defaultdict
|
||||
from itertools import chain
|
||||
from typing import Optional
|
||||
|
||||
from openpilot.tools.lib.auth_config import get_token
|
||||
from openpilot.tools.lib.api import CommaApi
|
||||
from openpilot.tools.lib.helpers import RE
|
||||
|
||||
QLOG_FILENAMES = ['qlog', 'qlog.bz2']
|
||||
QCAMERA_FILENAMES = ['qcamera.ts']
|
||||
LOG_FILENAMES = ['rlog', 'rlog.bz2', 'raw_log.bz2']
|
||||
CAMERA_FILENAMES = ['fcamera.hevc', 'video.hevc']
|
||||
DCAMERA_FILENAMES = ['dcamera.hevc']
|
||||
ECAMERA_FILENAMES = ['ecamera.hevc']
|
||||
|
||||
class Route:
|
||||
def __init__(self, name, data_dir=None):
|
||||
self._name = RouteName(name)
|
||||
self.files = None
|
||||
if data_dir is not None:
|
||||
self._segments = self._get_segments_local(data_dir)
|
||||
else:
|
||||
self._segments = self._get_segments_remote()
|
||||
self.max_seg_number = self._segments[-1].name.segment_num
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def segments(self):
|
||||
return self._segments
|
||||
|
||||
def log_paths(self):
|
||||
log_path_by_seg_num = {s.name.segment_num: s.log_path for s in self._segments}
|
||||
return [log_path_by_seg_num.get(i, None) for i in range(self.max_seg_number+1)]
|
||||
|
||||
def qlog_paths(self):
|
||||
qlog_path_by_seg_num = {s.name.segment_num: s.qlog_path for s in self._segments}
|
||||
return [qlog_path_by_seg_num.get(i, None) for i in range(self.max_seg_number+1)]
|
||||
|
||||
def camera_paths(self):
|
||||
camera_path_by_seg_num = {s.name.segment_num: s.camera_path for s in self._segments}
|
||||
return [camera_path_by_seg_num.get(i, None) for i in range(self.max_seg_number+1)]
|
||||
|
||||
def dcamera_paths(self):
|
||||
dcamera_path_by_seg_num = {s.name.segment_num: s.dcamera_path for s in self._segments}
|
||||
return [dcamera_path_by_seg_num.get(i, None) for i in range(self.max_seg_number+1)]
|
||||
|
||||
def ecamera_paths(self):
|
||||
ecamera_path_by_seg_num = {s.name.segment_num: s.ecamera_path for s in self._segments}
|
||||
return [ecamera_path_by_seg_num.get(i, None) for i in range(self.max_seg_number+1)]
|
||||
|
||||
def qcamera_paths(self):
|
||||
qcamera_path_by_seg_num = {s.name.segment_num: s.qcamera_path for s in self._segments}
|
||||
return [qcamera_path_by_seg_num.get(i, None) for i in range(self.max_seg_number+1)]
|
||||
|
||||
# TODO: refactor this, it's super repetitive
|
||||
def _get_segments_remote(self):
|
||||
api = CommaApi(get_token())
|
||||
route_files = api.get('v1/route/' + self.name.canonical_name + '/files')
|
||||
self.files = list(chain.from_iterable(route_files.values()))
|
||||
|
||||
segments = {}
|
||||
for url in self.files:
|
||||
_, dongle_id, time_str, segment_num, fn = urlparse(url).path.rsplit('/', maxsplit=4)
|
||||
segment_name = f'{dongle_id}|{time_str}--{segment_num}'
|
||||
if segments.get(segment_name):
|
||||
segments[segment_name] = Segment(
|
||||
segment_name,
|
||||
url if fn in LOG_FILENAMES else segments[segment_name].log_path,
|
||||
url if fn in QLOG_FILENAMES else segments[segment_name].qlog_path,
|
||||
url if fn in CAMERA_FILENAMES else segments[segment_name].camera_path,
|
||||
url if fn in DCAMERA_FILENAMES else segments[segment_name].dcamera_path,
|
||||
url if fn in ECAMERA_FILENAMES else segments[segment_name].ecamera_path,
|
||||
url if fn in QCAMERA_FILENAMES else segments[segment_name].qcamera_path,
|
||||
)
|
||||
else:
|
||||
segments[segment_name] = Segment(
|
||||
segment_name,
|
||||
url if fn in LOG_FILENAMES else None,
|
||||
url if fn in QLOG_FILENAMES else None,
|
||||
url if fn in CAMERA_FILENAMES else None,
|
||||
url if fn in DCAMERA_FILENAMES else None,
|
||||
url if fn in ECAMERA_FILENAMES else None,
|
||||
url if fn in QCAMERA_FILENAMES else None,
|
||||
)
|
||||
|
||||
return sorted(segments.values(), key=lambda seg: seg.name.segment_num)
|
||||
|
||||
def _get_segments_local(self, data_dir):
|
||||
files = os.listdir(data_dir)
|
||||
segment_files = defaultdict(list)
|
||||
|
||||
for f in files:
|
||||
fullpath = os.path.join(data_dir, f)
|
||||
explorer_match = re.match(RE.EXPLORER_FILE, f)
|
||||
op_match = re.match(RE.OP_SEGMENT_DIR, f)
|
||||
|
||||
if explorer_match:
|
||||
segment_name = explorer_match.group('segment_name')
|
||||
fn = explorer_match.group('file_name')
|
||||
if segment_name.replace('_', '|').startswith(self.name.canonical_name):
|
||||
segment_files[segment_name].append((fullpath, fn))
|
||||
elif op_match and os.path.isdir(fullpath):
|
||||
segment_name = op_match.group('segment_name')
|
||||
if segment_name.startswith(self.name.canonical_name):
|
||||
for seg_f in os.listdir(fullpath):
|
||||
segment_files[segment_name].append((os.path.join(fullpath, seg_f), seg_f))
|
||||
elif f == self.name.canonical_name:
|
||||
for seg_num in os.listdir(fullpath):
|
||||
if not seg_num.isdigit():
|
||||
continue
|
||||
|
||||
segment_name = f'{self.name.canonical_name}--{seg_num}'
|
||||
for seg_f in os.listdir(os.path.join(fullpath, seg_num)):
|
||||
segment_files[segment_name].append((os.path.join(fullpath, seg_num, seg_f), seg_f))
|
||||
|
||||
segments = []
|
||||
for segment, files in segment_files.items():
|
||||
|
||||
try:
|
||||
log_path = next(path for path, filename in files if filename in LOG_FILENAMES)
|
||||
except StopIteration:
|
||||
log_path = None
|
||||
|
||||
try:
|
||||
qlog_path = next(path for path, filename in files if filename in QLOG_FILENAMES)
|
||||
except StopIteration:
|
||||
qlog_path = None
|
||||
|
||||
try:
|
||||
camera_path = next(path for path, filename in files if filename in CAMERA_FILENAMES)
|
||||
except StopIteration:
|
||||
camera_path = None
|
||||
|
||||
try:
|
||||
dcamera_path = next(path for path, filename in files if filename in DCAMERA_FILENAMES)
|
||||
except StopIteration:
|
||||
dcamera_path = None
|
||||
|
||||
try:
|
||||
ecamera_path = next(path for path, filename in files if filename in ECAMERA_FILENAMES)
|
||||
except StopIteration:
|
||||
ecamera_path = None
|
||||
|
||||
try:
|
||||
qcamera_path = next(path for path, filename in files if filename in QCAMERA_FILENAMES)
|
||||
except StopIteration:
|
||||
qcamera_path = None
|
||||
|
||||
segments.append(Segment(segment, log_path, qlog_path, camera_path, dcamera_path, ecamera_path, qcamera_path))
|
||||
|
||||
if len(segments) == 0:
|
||||
raise ValueError(f'Could not find segments for route {self.name.canonical_name} in data directory {data_dir}')
|
||||
return sorted(segments, key=lambda seg: seg.name.segment_num)
|
||||
|
||||
class Segment:
|
||||
def __init__(self, name, log_path, qlog_path, camera_path, dcamera_path, ecamera_path, qcamera_path):
|
||||
self._name = SegmentName(name)
|
||||
self.log_path = log_path
|
||||
self.qlog_path = qlog_path
|
||||
self.camera_path = camera_path
|
||||
self.dcamera_path = dcamera_path
|
||||
self.ecamera_path = ecamera_path
|
||||
self.qcamera_path = qcamera_path
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._name
|
||||
|
||||
class RouteName:
|
||||
def __init__(self, name_str: str):
|
||||
self._name_str = name_str
|
||||
delim = next(c for c in self._name_str if c in ("|", "/"))
|
||||
self._dongle_id, self._time_str = self._name_str.split(delim)
|
||||
|
||||
assert len(self._dongle_id) == 16, self._name_str
|
||||
assert len(self._time_str) == 20, self._name_str
|
||||
self._canonical_name = f"{self._dongle_id}|{self._time_str}"
|
||||
|
||||
@property
|
||||
def canonical_name(self) -> str: return self._canonical_name
|
||||
|
||||
@property
|
||||
def dongle_id(self) -> str: return self._dongle_id
|
||||
|
||||
@property
|
||||
def time_str(self) -> str: return self._time_str
|
||||
|
||||
def __str__(self) -> str: return self._canonical_name
|
||||
|
||||
class SegmentName:
|
||||
# TODO: add constructor that takes dongle_id, time_str, segment_num and then create instances
|
||||
# of this class instead of manually constructing a segment name (use canonical_name prop instead)
|
||||
def __init__(self, name_str: str, allow_route_name=False):
|
||||
data_dir_path_separator_index = name_str.rsplit("|", 1)[0].rfind("/")
|
||||
use_data_dir = (data_dir_path_separator_index != -1) and ("|" in name_str)
|
||||
self._name_str = name_str[data_dir_path_separator_index + 1:] if use_data_dir else name_str
|
||||
self._data_dir = name_str[:data_dir_path_separator_index] if use_data_dir else None
|
||||
|
||||
seg_num_delim = "--" if self._name_str.count("--") == 2 else "/"
|
||||
name_parts = self._name_str.rsplit(seg_num_delim, 1)
|
||||
if allow_route_name and len(name_parts) == 1:
|
||||
name_parts.append("-1") # no segment number
|
||||
self._route_name = RouteName(name_parts[0])
|
||||
self._num = int(name_parts[1])
|
||||
self._canonical_name = f"{self._route_name._dongle_id}|{self._route_name._time_str}--{self._num}"
|
||||
|
||||
@property
|
||||
def canonical_name(self) -> str: return self._canonical_name
|
||||
|
||||
@property
|
||||
def dongle_id(self) -> str: return self._route_name.dongle_id
|
||||
|
||||
@property
|
||||
def time_str(self) -> str: return self._route_name.time_str
|
||||
|
||||
@property
|
||||
def segment_num(self) -> int: return self._num
|
||||
|
||||
@property
|
||||
def route_name(self) -> RouteName: return self._route_name
|
||||
|
||||
@property
|
||||
def data_dir(self) -> Optional[str]: return self._data_dir
|
||||
|
||||
def __str__(self) -> str: return self._canonical_name
|
||||
|
||||
|
||||
class SegmentRange:
|
||||
def __init__(self, segment_range: str):
|
||||
self.m = re.fullmatch(RE.SEGMENT_RANGE, segment_range)
|
||||
assert self.m, f"Segment range is not valid {segment_range}"
|
||||
|
||||
@property
|
||||
def route_name(self):
|
||||
return self.m.group("route_name")
|
||||
|
||||
@property
|
||||
def dongle_id(self):
|
||||
return self.m.group("dongle_id")
|
||||
|
||||
@property
|
||||
def timestamp(self):
|
||||
return self.m.group("timestamp")
|
||||
|
||||
@property
|
||||
def _slice(self):
|
||||
return self.m.group("slice")
|
||||
|
||||
@property
|
||||
def selector(self):
|
||||
return self.m.group("selector")
|
||||
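A minimal sketch of Route and SegmentName; the route name is borrowed from tools/lib/tests/test_route_library.py, and fetching remote segment lists assumes a comma account token is available to CommaApi:

from openpilot.tools.lib.route import Route, SegmentName

route = Route("a2a0ccea32023010|2023-07-27--13-01-19")
print(route.max_seg_number)
print(route.log_paths())  # one entry per segment, None where no rlog was uploaded

seg = SegmentName("a2a0ccea32023010|2023-07-27--13-01-19--2")
print(seg.dongle_id, seg.segment_num)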
141
tools/lib/srreader.py
Normal file
@@ -0,0 +1,141 @@
|
||||
import enum
|
||||
import numpy as np
|
||||
import pathlib
|
||||
import re
|
||||
from urllib.parse import parse_qs, urlparse
|
||||
|
||||
from openpilot.selfdrive.test.openpilotci import get_url
|
||||
from openpilot.tools.lib.helpers import RE
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
from openpilot.tools.lib.route import Route, SegmentRange
|
||||
|
||||
class ReadMode(enum.StrEnum):
|
||||
RLOG = "r" # only read rlogs
|
||||
QLOG = "q" # only read qlogs
|
||||
#AUTO = "a" # default to rlogs, fallback to qlogs, not supported yet
|
||||
|
||||
|
||||
def create_slice_from_string(s: str):
|
||||
m = re.fullmatch(RE.SLICE, s)
|
||||
assert m is not None, f"Invalid slice: {s}"
|
||||
start, end, step = m.groups()
|
||||
start = int(start) if start is not None else None
|
||||
end = int(end) if end is not None else None
|
||||
step = int(step) if step is not None else None
|
||||
|
||||
if start is not None and ":" not in s and end is None and step is None:
|
||||
return start
|
||||
return slice(start, end, step)
|
||||
|
||||
|
||||
def parse_slice(sr: SegmentRange):
|
||||
route = Route(sr.route_name)
|
||||
segs = np.arange(route.max_seg_number+1)
|
||||
s = create_slice_from_string(sr._slice)
|
||||
return segs[s] if isinstance(s, slice) else [segs[s]]
|
||||
|
||||
def comma_api_source(sr: SegmentRange, mode=ReadMode.RLOG, sort_by_time=False):
|
||||
segs = parse_slice(sr)
|
||||
route = Route(sr.route_name)
|
||||
|
||||
log_paths = route.log_paths() if mode == ReadMode.RLOG else route.qlog_paths()
|
||||
|
||||
invalid_segs = [seg for seg in segs if log_paths[seg] is None]
|
||||
|
||||
assert not len(invalid_segs), f"Some of the requested segments are not available: {invalid_segs}"
|
||||
|
||||
for seg in segs:
|
||||
yield LogReader(log_paths[seg], sort_by_time=sort_by_time)
|
||||
|
||||
def internal_source(sr: SegmentRange, mode=ReadMode.RLOG, sort_by_time=False):
|
||||
segs = parse_slice(sr)
|
||||
|
||||
for seg in segs:
|
||||
yield LogReader(f"cd:/{sr.dongle_id}/{sr.timestamp}/{seg}/{'rlog' if mode == ReadMode.RLOG else 'qlog'}.bz2", sort_by_time=sort_by_time)
|
||||
|
||||
def openpilotci_source(sr: SegmentRange, mode=ReadMode.RLOG, sort_by_time=False):
|
||||
segs = parse_slice(sr)
|
||||
|
||||
for seg in segs:
|
||||
yield LogReader(get_url(sr.route_name, seg, 'rlog' if mode == ReadMode.RLOG else 'qlog'), sort_by_time=sort_by_time)
|
||||
|
||||
def direct_source(file_or_url, sort_by_time):
|
||||
yield LogReader(file_or_url, sort_by_time=sort_by_time)
|
||||
|
||||
def auto_source(*args, **kwargs):
|
||||
# Automatically determine viable source
|
||||
|
||||
try:
|
||||
next(internal_source(*args, **kwargs))
|
||||
return internal_source(*args, **kwargs)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
try:
|
||||
next(openpilotci_source(*args, **kwargs))
|
||||
return openpilotci_source(*args, **kwargs)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return comma_api_source(*args, **kwargs)
|
||||
|
||||
def parse_useradmin(identifier):
|
||||
if "useradmin.comma.ai" in identifier:
|
||||
query = parse_qs(urlparse(identifier).query)
|
||||
return query["onebox"][0]
|
||||
return None
|
||||
|
||||
def parse_cabana(identifier):
|
||||
if "cabana.comma.ai" in identifier:
|
||||
query = parse_qs(urlparse(identifier).query)
|
||||
return query["route"][0]
|
||||
return None
|
||||
|
||||
def parse_cd(identifier):
|
||||
if "cd:/" in identifier:
|
||||
return identifier.replace("cd:/", "")
|
||||
return None
|
||||
|
||||
def parse_direct(identifier):
|
||||
if "https://" in identifier or "http://" in identifier or pathlib.Path(identifier).exists():
|
||||
return identifier
|
||||
return None
|
||||
|
||||
def parse_indirect(identifier):
|
||||
parsed = parse_useradmin(identifier) or parse_cabana(identifier)
|
||||
|
||||
if parsed is not None:
|
||||
return parsed, comma_api_source, True
|
||||
|
||||
parsed = parse_cd(identifier)
|
||||
if parsed is not None:
|
||||
return parsed, internal_source, True
|
||||
|
||||
return identifier, None, False
|
||||
|
||||
class SegmentRangeReader:
|
||||
def _logreaders_from_identifier(self, identifier):
|
||||
parsed, source, is_indirect = parse_indirect(identifier)
|
||||
|
||||
if not is_indirect:
|
||||
direct_parsed = parse_direct(identifier)
|
||||
if direct_parsed is not None:
|
||||
return direct_source(identifier, sort_by_time=self.sort_by_time)
|
||||
|
||||
sr = SegmentRange(parsed)
|
||||
mode = self.default_mode if sr.selector is None else ReadMode(sr.selector)
|
||||
source = self.default_source if source is None else source
|
||||
|
||||
return source(sr, mode, sort_by_time=self.sort_by_time)
|
||||
|
||||
def __init__(self, identifier: str, default_mode=ReadMode.RLOG, default_source=auto_source, sort_by_time=False):
|
||||
self.default_mode = default_mode
|
||||
self.default_source = default_source
|
||||
self.sort_by_time = sort_by_time
|
||||
|
||||
self.lrs = self._logreaders_from_identifier(identifier)
|
||||
|
||||
def __iter__(self):
|
||||
for lr in self.lrs:
|
||||
for m in lr:
|
||||
yield m
|
||||
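A minimal sketch of SegmentRangeReader; the route and slice syntax match tools/lib/tests/test_srreader.py, and carState is just an example message type:

from openpilot.tools.lib.srreader import ReadMode, SegmentRangeReader

# qlogs for the first three segments of the test route
for msg in SegmentRangeReader("344c5c15b34f2d8a/2024-01-03--09-37-12/0:3", default_mode=ReadMode.QLOG):
  if msg.which() == "carState":
    print(msg.carState.vEgo)
    break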
0
tools/lib/tests/__init__.py
Normal file
129
tools/lib/tests/test_caching.py
Executable file
@@ -0,0 +1,129 @@
|
||||
#!/usr/bin/env python3
|
||||
from functools import wraps
|
||||
import http.server
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
import unittest
|
||||
|
||||
from parameterized import parameterized
|
||||
|
||||
from openpilot.tools.lib.url_file import URLFile
|
||||
|
||||
|
||||
class CachingTestRequestHandler(http.server.BaseHTTPRequestHandler):
|
||||
FILE_EXISTS = True
|
||||
|
||||
def do_GET(self):
|
||||
if self.FILE_EXISTS:
|
||||
self.send_response(200, b'1234')
|
||||
else:
|
||||
self.send_response(404)
|
||||
self.end_headers()
|
||||
|
||||
def do_HEAD(self):
|
||||
if self.FILE_EXISTS:
|
||||
self.send_response(200)
|
||||
self.send_header("Content-Length", "4")
|
||||
else:
|
||||
self.send_response(404)
|
||||
self.end_headers()
|
||||
|
||||
|
||||
class CachingTestServer(threading.Thread):
|
||||
def run(self):
|
||||
self.server = http.server.HTTPServer(("127.0.0.1", 0), CachingTestRequestHandler)
|
||||
self.port = self.server.server_port
|
||||
self.server.serve_forever()
|
||||
|
||||
def stop(self):
|
||||
self.server.server_close()
|
||||
self.server.shutdown()
|
||||
|
||||
def with_caching_server(func):
|
||||
@wraps(func)
|
||||
def wrapper(*args, **kwargs):
|
||||
server = CachingTestServer()
|
||||
server.start()
|
||||
    time.sleep(0.25) # wait for the server to get its port
|
||||
try:
|
||||
func(*args, **kwargs, port=server.port)
|
||||
finally:
|
||||
server.stop()
|
||||
return wrapper
|
||||
|
||||
|
||||
class TestFileDownload(unittest.TestCase):
|
||||
|
||||
def compare_loads(self, url, start=0, length=None):
|
||||
"""Compares range between cached and non cached version"""
|
||||
file_cached = URLFile(url, cache=True)
|
||||
file_downloaded = URLFile(url, cache=False)
|
||||
|
||||
file_cached.seek(start)
|
||||
file_downloaded.seek(start)
|
||||
|
||||
self.assertEqual(file_cached.get_length(), file_downloaded.get_length())
|
||||
self.assertLessEqual(length + start if length is not None else 0, file_downloaded.get_length())
|
||||
|
||||
response_cached = file_cached.read(ll=length)
|
||||
response_downloaded = file_downloaded.read(ll=length)
|
||||
|
||||
self.assertEqual(response_cached, response_downloaded)
|
||||
|
||||
# Now test with cache in place
|
||||
file_cached = URLFile(url, cache=True)
|
||||
file_cached.seek(start)
|
||||
response_cached = file_cached.read(ll=length)
|
||||
|
||||
self.assertEqual(file_cached.get_length(), file_downloaded.get_length())
|
||||
self.assertEqual(response_cached, response_downloaded)
|
||||
|
||||
def test_small_file(self):
|
||||
# Make sure we don't force cache
|
||||
os.environ["FILEREADER_CACHE"] = "0"
|
||||
small_file_url = "https://raw.githubusercontent.com/commaai/openpilot/master/docs/SAFETY.md"
|
||||
# If you want large file to be larger than a chunk
|
||||
# large_file_url = "https://commadataci.blob.core.windows.net/openpilotci/0375fdf7b1ce594d/2019-06-13--08-32-25/3/fcamera.hevc"
|
||||
|
||||
# Load full small file
|
||||
self.compare_loads(small_file_url)
|
||||
|
||||
file_small = URLFile(small_file_url)
|
||||
length = file_small.get_length()
|
||||
|
||||
self.compare_loads(small_file_url, length - 100, 100)
|
||||
self.compare_loads(small_file_url, 50, 100)
|
||||
|
||||
# Load small file 100 bytes at a time
|
||||
for i in range(length // 100):
|
||||
self.compare_loads(small_file_url, 100 * i, 100)
|
||||
|
||||
def test_large_file(self):
|
||||
large_file_url = "https://commadataci.blob.core.windows.net/openpilotci/0375fdf7b1ce594d/2019-06-13--08-32-25/3/qlog.bz2"
|
||||
# Load the end 100 bytes of both files
|
||||
file_large = URLFile(large_file_url)
|
||||
length = file_large.get_length()
|
||||
|
||||
self.compare_loads(large_file_url, length - 100, 100)
|
||||
self.compare_loads(large_file_url)
|
||||
|
||||
@parameterized.expand([(True, ), (False, )])
|
||||
@with_caching_server
|
||||
def test_recover_from_missing_file(self, cache_enabled, port):
|
||||
os.environ["FILEREADER_CACHE"] = "1" if cache_enabled else "0"
|
||||
|
||||
file_url = f"http://localhost:{port}/test.png"
|
||||
|
||||
CachingTestRequestHandler.FILE_EXISTS = False
|
||||
length = URLFile(file_url).get_length()
|
||||
self.assertEqual(length, -1)
|
||||
|
||||
CachingTestRequestHandler.FILE_EXISTS = True
|
||||
length = URLFile(file_url).get_length()
|
||||
self.assertEqual(length, 4)
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
67
tools/lib/tests/test_readers.py
Executable file
@@ -0,0 +1,67 @@
|
||||
#!/usr/bin/env python
|
||||
import unittest
|
||||
import requests
|
||||
import tempfile
|
||||
|
||||
from collections import defaultdict
|
||||
import numpy as np
|
||||
from openpilot.tools.lib.framereader import FrameReader
|
||||
from openpilot.tools.lib.logreader import LogReader
|
||||
|
||||
|
||||
class TestReaders(unittest.TestCase):
|
||||
@unittest.skip("skip for bandwidth reasons")
|
||||
def test_logreader(self):
|
||||
def _check_data(lr):
|
||||
hist = defaultdict(int)
|
||||
for l in lr:
|
||||
hist[l.which()] += 1
|
||||
|
||||
self.assertEqual(hist['carControl'], 6000)
|
||||
self.assertEqual(hist['logMessage'], 6857)
|
||||
|
||||
with tempfile.NamedTemporaryFile(suffix=".bz2") as fp:
|
||||
r = requests.get("https://github.com/commaai/comma2k19/blob/master/Example_1/b0c9d2329ad1606b%7C2018-08-02--08-34-47/40/raw_log.bz2?raw=true", timeout=10)
|
||||
fp.write(r.content)
|
||||
fp.flush()
|
||||
|
||||
lr_file = LogReader(fp.name)
|
||||
_check_data(lr_file)
|
||||
|
||||
lr_url = LogReader("https://github.com/commaai/comma2k19/blob/master/Example_1/b0c9d2329ad1606b%7C2018-08-02--08-34-47/40/raw_log.bz2?raw=true")
|
||||
_check_data(lr_url)
|
||||
|
||||
@unittest.skip("skip for bandwidth reasons")
|
||||
def test_framereader(self):
|
||||
def _check_data(f):
|
||||
self.assertEqual(f.frame_count, 1200)
|
||||
self.assertEqual(f.w, 1164)
|
||||
self.assertEqual(f.h, 874)
|
||||
|
||||
frame_first_30 = f.get(0, 30)
|
||||
self.assertEqual(len(frame_first_30), 30)
|
||||
|
||||
print(frame_first_30[15])
|
||||
|
||||
print("frame_0")
|
||||
frame_0 = f.get(0, 1)
|
||||
frame_15 = f.get(15, 1)
|
||||
|
||||
print(frame_15[0])
|
||||
|
||||
assert np.all(frame_first_30[0] == frame_0[0])
|
||||
assert np.all(frame_first_30[15] == frame_15[0])
|
||||
|
||||
with tempfile.NamedTemporaryFile(suffix=".hevc") as fp:
|
||||
r = requests.get("https://github.com/commaai/comma2k19/blob/master/Example_1/b0c9d2329ad1606b%7C2018-08-02--08-34-47/40/video.hevc?raw=true", timeout=10)
|
||||
fp.write(r.content)
|
||||
fp.flush()
|
||||
|
||||
fr_file = FrameReader(fp.name)
|
||||
_check_data(fr_file)
|
||||
|
||||
fr_url = FrameReader("https://github.com/commaai/comma2k19/blob/master/Example_1/b0c9d2329ad1606b%7C2018-08-02--08-34-47/40/video.hevc?raw=true")
|
||||
_check_data(fr_url)
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
32
tools/lib/tests/test_route_library.py
Executable file
@@ -0,0 +1,32 @@
|
||||
#!/usr/bin/env python
|
||||
import unittest
|
||||
from collections import namedtuple
|
||||
|
||||
from openpilot.tools.lib.route import SegmentName
|
||||
|
||||
class TestRouteLibrary(unittest.TestCase):
|
||||
def test_segment_name_formats(self):
|
||||
Case = namedtuple('Case', ['input', 'expected_route', 'expected_segment_num', 'expected_data_dir'])
|
||||
|
||||
cases = [ Case("a2a0ccea32023010|2023-07-27--13-01-19", "a2a0ccea32023010|2023-07-27--13-01-19", -1, None),
|
||||
Case("a2a0ccea32023010/2023-07-27--13-01-19--1", "a2a0ccea32023010|2023-07-27--13-01-19", 1, None),
|
||||
Case("a2a0ccea32023010|2023-07-27--13-01-19/2", "a2a0ccea32023010|2023-07-27--13-01-19", 2, None),
|
||||
Case("a2a0ccea32023010/2023-07-27--13-01-19/3", "a2a0ccea32023010|2023-07-27--13-01-19", 3, None),
|
||||
Case("/data/media/0/realdata/a2a0ccea32023010|2023-07-27--13-01-19", "a2a0ccea32023010|2023-07-27--13-01-19", -1, "/data/media/0/realdata"),
|
||||
Case("/data/media/0/realdata/a2a0ccea32023010|2023-07-27--13-01-19--1", "a2a0ccea32023010|2023-07-27--13-01-19", 1, "/data/media/0/realdata"),
|
||||
Case("/data/media/0/realdata/a2a0ccea32023010|2023-07-27--13-01-19/2", "a2a0ccea32023010|2023-07-27--13-01-19", 2, "/data/media/0/realdata") ]
|
||||
|
||||
def _validate(case):
|
||||
route_or_segment_name = case.input
|
||||
|
||||
s = SegmentName(route_or_segment_name, allow_route_name=True)
|
||||
|
||||
self.assertEqual(str(s.route_name), case.expected_route)
|
||||
self.assertEqual(s.segment_num, case.expected_segment_num)
|
||||
self.assertEqual(s.data_dir, case.expected_data_dir)
|
||||
|
||||
for case in cases:
|
||||
_validate(case)
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
88
tools/lib/tests/test_srreader.py
Normal file
@@ -0,0 +1,88 @@
|
||||
import shutil
|
||||
import tempfile
|
||||
import numpy as np
|
||||
import unittest
|
||||
from parameterized import parameterized
|
||||
import requests
|
||||
|
||||
from openpilot.tools.lib.route import SegmentRange
|
||||
from openpilot.tools.lib.srreader import ReadMode, SegmentRangeReader, parse_slice, parse_indirect
|
||||
|
||||
NUM_SEGS = 17 # number of segments in the test route
|
||||
ALL_SEGS = list(np.arange(NUM_SEGS))
|
||||
TEST_ROUTE = "344c5c15b34f2d8a/2024-01-03--09-37-12"
|
||||
QLOG_FILE = "https://commadataci.blob.core.windows.net/openpilotci/0375fdf7b1ce594d/2019-06-13--08-32-25/3/qlog.bz2"
|
||||
|
||||
class TestSegmentRangeReader(unittest.TestCase):
|
||||
@parameterized.expand([
|
||||
(f"{TEST_ROUTE}", ALL_SEGS),
|
||||
(f"{TEST_ROUTE.replace('/', '|')}", ALL_SEGS),
|
||||
(f"{TEST_ROUTE}--0", [0]),
|
||||
(f"{TEST_ROUTE}--5", [5]),
|
||||
(f"{TEST_ROUTE}/0", [0]),
|
||||
(f"{TEST_ROUTE}/5", [5]),
|
||||
(f"{TEST_ROUTE}/0:10", ALL_SEGS[0:10]),
|
||||
(f"{TEST_ROUTE}/0:0", []),
|
||||
(f"{TEST_ROUTE}/4:6", ALL_SEGS[4:6]),
|
||||
(f"{TEST_ROUTE}/0:-1", ALL_SEGS[0:-1]),
|
||||
(f"{TEST_ROUTE}/:5", ALL_SEGS[:5]),
|
||||
(f"{TEST_ROUTE}/2:", ALL_SEGS[2:]),
|
||||
(f"{TEST_ROUTE}/2:-1", ALL_SEGS[2:-1]),
|
||||
(f"{TEST_ROUTE}/-1", [ALL_SEGS[-1]]),
|
||||
(f"{TEST_ROUTE}/-2", [ALL_SEGS[-2]]),
|
||||
(f"{TEST_ROUTE}/-2:-1", ALL_SEGS[-2:-1]),
|
||||
(f"{TEST_ROUTE}/-4:-2", ALL_SEGS[-4:-2]),
|
||||
(f"{TEST_ROUTE}/:10:2", ALL_SEGS[:10:2]),
|
||||
(f"{TEST_ROUTE}/5::2", ALL_SEGS[5::2]),
|
||||
(f"https://useradmin.comma.ai/?onebox={TEST_ROUTE}", ALL_SEGS),
|
||||
(f"https://useradmin.comma.ai/?onebox={TEST_ROUTE.replace('/', '|')}", ALL_SEGS),
|
||||
(f"https://useradmin.comma.ai/?onebox={TEST_ROUTE.replace('/', '%7C')}", ALL_SEGS),
|
||||
(f"https://cabana.comma.ai/?route={TEST_ROUTE}", ALL_SEGS),
|
||||
(f"cd:/{TEST_ROUTE}", ALL_SEGS),
|
||||
])
|
||||
def test_indirect_parsing(self, identifier, expected):
|
||||
parsed, _, _ = parse_indirect(identifier)
|
||||
sr = SegmentRange(parsed)
|
||||
segs = parse_slice(sr)
|
||||
self.assertListEqual(list(segs), expected)
|
||||
|
||||
def test_direct_parsing(self):
|
||||
qlog = tempfile.NamedTemporaryFile(mode='wb', delete=False)
|
||||
|
||||
with requests.get(QLOG_FILE, stream=True) as r:
|
||||
with qlog as f:
|
||||
shutil.copyfileobj(r.raw, f)
|
||||
|
||||
for f in [QLOG_FILE, qlog.name]:
|
||||
l = len(list(SegmentRangeReader(f)))
|
||||
self.assertGreater(l, 100)
|
||||
|
||||
@parameterized.expand([
|
||||
(f"{TEST_ROUTE}///",),
|
||||
(f"{TEST_ROUTE}---",),
|
||||
(f"{TEST_ROUTE}/-4:--2",),
|
||||
(f"{TEST_ROUTE}/-a",),
|
||||
(f"{TEST_ROUTE}/j",),
|
||||
(f"{TEST_ROUTE}/0:1:2:3",),
|
||||
(f"{TEST_ROUTE}/:::3",),
|
||||
])
|
||||
def test_bad_ranges(self, segment_range):
|
||||
with self.assertRaises(AssertionError):
|
||||
sr = SegmentRange(segment_range)
|
||||
parse_slice(sr)
|
||||
|
||||
def test_modes(self):
|
||||
qlog_len = len(list(SegmentRangeReader(f"{TEST_ROUTE}/0", ReadMode.QLOG)))
|
||||
rlog_len = len(list(SegmentRangeReader(f"{TEST_ROUTE}/0", ReadMode.RLOG)))
|
||||
|
||||
self.assertLess(qlog_len * 6, rlog_len)
|
||||
|
||||
def test_modes_from_name(self):
|
||||
qlog_len = len(list(SegmentRangeReader(f"{TEST_ROUTE}/0/q")))
|
||||
rlog_len = len(list(SegmentRangeReader(f"{TEST_ROUTE}/0/r")))
|
||||
|
||||
self.assertLess(qlog_len * 6, rlog_len)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
156
tools/lib/url_file.py
Normal file
156
tools/lib/url_file.py
Normal file
@@ -0,0 +1,156 @@
|
||||
import os
|
||||
import time
|
||||
import threading
|
||||
from hashlib import sha256
|
||||
from urllib3 import PoolManager
|
||||
from urllib3.util import Timeout
|
||||
from tenacity import retry, wait_random_exponential, stop_after_attempt
|
||||
|
||||
from openpilot.common.file_helpers import atomic_write_in_dir
|
||||
from openpilot.system.hardware.hw import Paths
|
||||
# Cache chunk size
|
||||
K = 1000
|
||||
CHUNK_SIZE = 1000 * K
|
||||
|
||||
|
||||
def hash_256(link):
|
||||
hsh = str(sha256((link.split("?")[0]).encode('utf-8')).hexdigest())
|
||||
return hsh
|
||||
|
||||
|
||||
class URLFileException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class URLFile:
|
||||
_tlocal = threading.local()
|
||||
|
||||
def __init__(self, url, debug=False, cache=None):
|
||||
self._url = url
|
||||
self._pos = 0
|
||||
self._length = None
|
||||
self._local_file = None
|
||||
self._debug = debug
|
||||
    # True by default, False if FILEREADER_CACHE is set to a non-zero value, but can be overwritten by the cache argument
|
||||
self._force_download = not int(os.environ.get("FILEREADER_CACHE", "0"))
|
||||
if cache is not None:
|
||||
self._force_download = not cache
|
||||
|
||||
if not self._force_download:
|
||||
os.makedirs(Paths.download_cache_root(), exist_ok=True)
|
||||
|
||||
try:
|
||||
self._http_client = URLFile._tlocal.http_client
|
||||
except AttributeError:
|
||||
self._http_client = URLFile._tlocal.http_client = PoolManager()
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_value, traceback):
|
||||
if self._local_file is not None:
|
||||
os.remove(self._local_file.name)
|
||||
self._local_file.close()
|
||||
self._local_file = None
|
||||
|
||||
@retry(wait=wait_random_exponential(multiplier=1, max=5), stop=stop_after_attempt(3), reraise=True)
|
||||
def get_length_online(self):
|
||||
timeout = Timeout(connect=50.0, read=500.0)
|
||||
response = self._http_client.request('HEAD', self._url, timeout=timeout, preload_content=False)
|
||||
if not (200 <= response.status <= 299):
|
||||
return -1
|
||||
length = response.headers.get('content-length', 0)
|
||||
return int(length)
|
||||
|
||||
def get_length(self):
|
||||
if self._length is not None:
|
||||
return self._length
|
||||
|
||||
file_length_path = os.path.join(Paths.download_cache_root(), hash_256(self._url) + "_length")
|
||||
if not self._force_download and os.path.exists(file_length_path):
|
||||
with open(file_length_path) as file_length:
|
||||
content = file_length.read()
|
||||
self._length = int(content)
|
||||
return self._length
|
||||
|
||||
self._length = self.get_length_online()
|
||||
if not self._force_download and self._length != -1:
|
||||
with atomic_write_in_dir(file_length_path, mode="w") as file_length:
|
||||
file_length.write(str(self._length))
|
||||
return self._length
|
||||
|
||||
def read(self, ll=None):
|
||||
if self._force_download:
|
||||
return self.read_aux(ll=ll)
|
||||
|
||||
file_begin = self._pos
|
||||
file_end = self._pos + ll if ll is not None else self.get_length()
|
||||
assert file_end != -1, f"Remote file is empty or doesn't exist: {self._url}"
|
||||
    # We have to align with the chunks we store. Position is the beginning of the latest chunk that starts before or at our read offset
|
||||
position = (file_begin // CHUNK_SIZE) * CHUNK_SIZE
|
||||
response = b""
|
||||
while True:
|
||||
self._pos = position
|
||||
chunk_number = self._pos / CHUNK_SIZE
|
||||
file_name = hash_256(self._url) + "_" + str(chunk_number)
|
||||
full_path = os.path.join(Paths.download_cache_root(), str(file_name))
|
||||
data = None
|
||||
# If we don't have a file, download it
|
||||
if not os.path.exists(full_path):
|
||||
data = self.read_aux(ll=CHUNK_SIZE)
|
||||
with atomic_write_in_dir(full_path, mode="wb") as new_cached_file:
|
||||
new_cached_file.write(data)
|
||||
else:
|
||||
with open(full_path, "rb") as cached_file:
|
||||
data = cached_file.read()
|
||||
|
||||
response += data[max(0, file_begin - position): min(CHUNK_SIZE, file_end - position)]
|
||||
|
||||
position += CHUNK_SIZE
|
||||
if position >= file_end:
|
||||
self._pos = file_end
|
||||
return response
|
||||
|
||||
@retry(wait=wait_random_exponential(multiplier=1, max=5), stop=stop_after_attempt(3), reraise=True)
|
||||
def read_aux(self, ll=None):
|
||||
download_range = False
|
||||
headers = {'Connection': 'keep-alive'}
|
||||
if self._pos != 0 or ll is not None:
|
||||
if ll is None:
|
||||
end = self.get_length() - 1
|
||||
else:
|
||||
end = min(self._pos + ll, self.get_length()) - 1
|
||||
if self._pos >= end:
|
||||
return b""
|
||||
headers['Range'] = f"bytes={self._pos}-{end}"
|
||||
download_range = True
|
||||
|
||||
if self._debug:
|
||||
t1 = time.time()
|
||||
|
||||
timeout = Timeout(connect=50.0, read=500.0)
|
||||
response = self._http_client.request('GET', self._url, timeout=timeout, preload_content=False, headers=headers)
|
||||
ret = response.data
|
||||
|
||||
if self._debug:
|
||||
t2 = time.time()
|
||||
if t2 - t1 > 0.1:
|
||||
print(f"get {self._url} {headers!r} {t2 - t1:.3f} slow")
|
||||
|
||||
response_code = response.status
|
||||
if response_code == 416: # Requested Range Not Satisfiable
|
||||
raise URLFileException(f"Error, range out of bounds {response_code} {headers} ({self._url}): {repr(ret)[:500]}")
|
||||
if download_range and response_code != 206: # Partial Content
|
||||
raise URLFileException(f"Error, requested range but got unexpected response {response_code} {headers} ({self._url}): {repr(ret)[:500]}")
|
||||
if (not download_range) and response_code != 200: # OK
|
||||
raise URLFileException(f"Error {response_code} {headers} ({self._url}): {repr(ret)[:500]}")
|
||||
|
||||
self._pos += len(ret)
|
||||
return ret
|
||||
|
||||
def seek(self, pos):
|
||||
self._pos = pos
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self._url
|
||||
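A minimal sketch of URLFile with the chunk cache enabled; the URL is the qlog already used by tools/lib/tests/test_srreader.py:

from openpilot.tools.lib.url_file import URLFile

url = "https://commadataci.blob.core.windows.net/openpilotci/0375fdf7b1ce594d/2019-06-13--08-32-25/3/qlog.bz2"
with URLFile(url, cache=True) as f:  # chunks are written under Paths.download_cache_root()
  print(f.get_length())
  head = f.read(ll=1024)             # only the first CHUNK_SIZE chunk is fetched
  print(len(head))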
312
tools/lib/vidindex.py
Executable file
@@ -0,0 +1,312 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import os
|
||||
import struct
|
||||
from enum import IntEnum
|
||||
from typing import Tuple
|
||||
|
||||
from openpilot.tools.lib.filereader import FileReader
|
||||
|
||||
DEBUG = int(os.getenv("DEBUG", "0"))
|
||||
|
||||
# compare to ffmpeg parsing
|
||||
# ffmpeg -i <input.hevc> -c copy -bsf:v trace_headers -f null - 2>&1 | grep -B4 -A32 '] 0 '
|
||||
|
||||
# H.265 specification
|
||||
# https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-H.265-201802-S!!PDF-E&type=items
|
||||
|
||||
NAL_UNIT_START_CODE = b"\x00\x00\x01"
|
||||
NAL_UNIT_START_CODE_SIZE = len(NAL_UNIT_START_CODE)
|
||||
NAL_UNIT_HEADER_SIZE = 2
|
||||
|
||||
class HevcNalUnitType(IntEnum):
|
||||
TRAIL_N = 0 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
TRAIL_R = 1 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
TSA_N = 2 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
TSA_R = 3 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
STSA_N = 4 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
STSA_R = 5 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
RADL_N = 6 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
RADL_R = 7 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
RASL_N = 8 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
RASL_R = 9 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
RSV_VCL_N10 = 10
|
||||
RSV_VCL_R11 = 11
|
||||
RSV_VCL_N12 = 12
|
||||
RSV_VCL_R13 = 13
|
||||
RSV_VCL_N14 = 14
|
||||
RSV_VCL_R15 = 15
|
||||
BLA_W_LP = 16 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
BLA_W_RADL = 17 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
BLA_N_LP = 18 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
IDR_W_RADL = 19 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
IDR_N_LP = 20 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
CRA_NUT = 21 # RBSP structure: slice_segment_layer_rbsp( )
|
||||
RSV_IRAP_VCL22 = 22
|
||||
RSV_IRAP_VCL23 = 23
|
||||
RSV_VCL24 = 24
|
||||
RSV_VCL25 = 25
|
||||
RSV_VCL26 = 26
|
||||
RSV_VCL27 = 27
|
||||
RSV_VCL28 = 28
|
||||
RSV_VCL29 = 29
|
||||
RSV_VCL30 = 30
|
||||
RSV_VCL31 = 31
|
||||
VPS_NUT = 32 # RBSP structure: video_parameter_set_rbsp( )
|
||||
SPS_NUT = 33 # RBSP structure: seq_parameter_set_rbsp( )
|
||||
PPS_NUT = 34 # RBSP structure: pic_parameter_set_rbsp( )
|
||||
AUD_NUT = 35
|
||||
EOS_NUT = 36
|
||||
EOB_NUT = 37
|
||||
FD_NUT = 38
|
||||
PREFIX_SEI_NUT = 39
|
||||
SUFFIX_SEI_NUT = 40
|
||||
RSV_NVCL41 = 41
|
||||
RSV_NVCL42 = 42
|
||||
RSV_NVCL43 = 43
|
||||
RSV_NVCL44 = 44
|
||||
RSV_NVCL45 = 45
|
||||
RSV_NVCL46 = 46
|
||||
RSV_NVCL47 = 47
|
||||
UNSPEC48 = 48
|
||||
UNSPEC49 = 49
|
||||
UNSPEC50 = 50
|
||||
UNSPEC51 = 51
|
||||
UNSPEC52 = 52
|
||||
UNSPEC53 = 53
|
||||
UNSPEC54 = 54
|
||||
UNSPEC55 = 55
|
||||
UNSPEC56 = 56
|
||||
UNSPEC57 = 57
|
||||
UNSPEC58 = 58
|
||||
UNSPEC59 = 59
|
||||
UNSPEC60 = 60
|
||||
UNSPEC61 = 61
|
||||
UNSPEC62 = 62
|
||||
UNSPEC63 = 63
|
||||
|
||||
# B.2.2 Byte stream NAL unit semantics
|
||||
# - The nal_unit_type within the nal_unit( ) syntax structure is equal to VPS_NUT, SPS_NUT or PPS_NUT.
|
||||
# - The byte stream NAL unit syntax structure contains the first NAL unit of an access unit in decoding
|
||||
# order, as specified in clause 7.4.2.4.4.
|
||||
HEVC_PARAMETER_SET_NAL_UNITS = (
|
||||
HevcNalUnitType.VPS_NUT,
|
||||
HevcNalUnitType.SPS_NUT,
|
||||
HevcNalUnitType.PPS_NUT,
|
||||
)
|
||||
|
||||
# 3.29 coded slice segment NAL unit: A NAL unit that has nal_unit_type in the range of TRAIL_N to RASL_R,
|
||||
# inclusive, or in the range of BLA_W_LP to RSV_IRAP_VCL23, inclusive, which indicates that the NAL unit
|
||||
# contains a coded slice segment
|
||||
HEVC_CODED_SLICE_SEGMENT_NAL_UNITS = (
|
||||
HevcNalUnitType.TRAIL_N,
|
||||
HevcNalUnitType.TRAIL_R,
|
||||
HevcNalUnitType.TSA_N,
|
||||
HevcNalUnitType.TSA_R,
|
||||
HevcNalUnitType.STSA_N,
|
||||
HevcNalUnitType.STSA_R,
|
||||
HevcNalUnitType.RADL_N,
|
||||
HevcNalUnitType.RADL_R,
|
||||
HevcNalUnitType.RASL_N,
|
||||
HevcNalUnitType.RASL_R,
|
||||
HevcNalUnitType.BLA_W_LP,
|
||||
HevcNalUnitType.BLA_W_RADL,
|
||||
HevcNalUnitType.BLA_N_LP,
|
||||
HevcNalUnitType.IDR_W_RADL,
|
||||
HevcNalUnitType.IDR_N_LP,
|
||||
HevcNalUnitType.CRA_NUT,
|
||||
)
|
||||
|
||||
class VideoFileInvalid(Exception):
|
||||
pass
|
||||
|
||||
def get_ue(dat: bytes, start_idx: int, skip_bits: int) -> Tuple[int, int]:
|
||||
prefix_val = 0
|
||||
prefix_len = 0
|
||||
suffix_val = 0
|
||||
suffix_len = 0
|
||||
|
||||
i = start_idx
|
||||
while i < len(dat):
|
||||
j = 7
|
||||
while j >= 0:
|
||||
if skip_bits > 0:
|
||||
skip_bits -= 1
|
||||
elif prefix_val == 0:
|
||||
prefix_val = (dat[i] >> j) & 1
|
||||
prefix_len += 1
|
||||
else:
|
||||
suffix_val = (suffix_val << 1) | ((dat[i] >> j) & 1)
|
||||
suffix_len += 1
|
||||
j -= 1
|
||||
|
||||
if prefix_val == 1 and prefix_len - 1 == suffix_len:
|
||||
val = 2**(prefix_len-1) - 1 + suffix_val
|
||||
size = prefix_len + suffix_len
|
||||
return val, size
|
||||
i += 1
|
||||
|
||||
raise VideoFileInvalid("invalid exponential-golomb code")
|
||||
|
||||
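# Worked example (illustrative): the unsigned Exp-Golomb code "010" encodes the value 1 —
# prefix "01" gives prefix_len = 2, suffix "0" gives suffix_val = 0, so the value is
# 2**(2-1) - 1 + 0 = 1 over 3 bits. With those bits packed at the top of a byte:
#   get_ue(b"\x5f", 0, 0) == (1, 3)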
def require_nal_unit_start(dat: bytes, nal_unit_start: int) -> None:
|
||||
if nal_unit_start < 1:
|
||||
raise ValueError("start index must be greater than zero")
|
||||
|
||||
if dat[nal_unit_start:nal_unit_start + NAL_UNIT_START_CODE_SIZE] != NAL_UNIT_START_CODE:
|
||||
raise VideoFileInvalid("data must begin with start code")
|
||||
|
||||
def get_hevc_nal_unit_length(dat: bytes, nal_unit_start: int) -> int:
|
||||
try:
|
||||
pos = dat.index(NAL_UNIT_START_CODE, nal_unit_start + NAL_UNIT_START_CODE_SIZE)
|
||||
except ValueError:
|
||||
pos = -1
|
||||
|
||||
# length of NAL unit is byte count up to next NAL unit start index
|
||||
nal_unit_len = (pos if pos != -1 else len(dat)) - nal_unit_start
|
||||
if DEBUG:
|
||||
print(" nal_unit_len:", nal_unit_len)
|
||||
return nal_unit_len
|
||||
|
||||
def get_hevc_nal_unit_type(dat: bytes, nal_unit_start: int) -> HevcNalUnitType:
|
||||
# 7.3.1.2 NAL unit header syntax
|
||||
# nal_unit_header( ) { // descriptor
|
||||
# forbidden_zero_bit f(1)
|
||||
# nal_unit_type u(6)
|
||||
# nuh_layer_id u(6)
|
||||
# nuh_temporal_id_plus1 u(3)
|
||||
# }
|
||||
header_start = nal_unit_start + NAL_UNIT_START_CODE_SIZE
|
||||
nal_unit_header = dat[header_start:header_start + NAL_UNIT_HEADER_SIZE]
|
||||
if len(nal_unit_header) != 2:
|
||||
raise VideoFileInvalid("data to short to contain nal unit header")
|
||||
nal_unit_type = HevcNalUnitType((nal_unit_header[0] >> 1) & 0x3F)
|
||||
if DEBUG:
|
||||
print(" nal_unit_type:", nal_unit_type.name, f"({nal_unit_type.value})")
|
||||
return nal_unit_type
|
||||
|
||||
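# Worked example (illustrative): a VPS NAL unit starts b"\x00\x00\x01\x40\x01", so the
# header bytes are 0x40 0x01 and
#   nal_unit_type = (0x40 >> 1) & 0x3F = 32 = HevcNalUnitType.VPS_NUT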
def get_hevc_slice_type(dat: bytes, nal_unit_start: int, nal_unit_type: HevcNalUnitType) -> Tuple[int, bool]:
|
||||
# 7.3.2.9 Slice segment layer RBSP syntax
|
||||
# slice_segment_layer_rbsp( ) {
|
||||
# slice_segment_header( )
|
||||
# slice_segment_data( )
|
||||
# rbsp_slice_segment_trailing_bits( )
|
||||
# }
|
||||
# ...
|
||||
# 7.3.6.1 General slice segment header syntax
|
||||
# slice_segment_header( ) { // descriptor
|
||||
# first_slice_segment_in_pic_flag u(1)
|
||||
# if( nal_unit_type >= BLA_W_LP && nal_unit_type <= RSV_IRAP_VCL23 )
|
||||
# no_output_of_prior_pics_flag u(1)
|
||||
# slice_pic_parameter_set_id ue(v)
|
||||
# if( !first_slice_segment_in_pic_flag ) {
|
||||
# if( dependent_slice_segments_enabled_flag )
|
||||
# dependent_slice_segment_flag u(1)
|
||||
# slice_segment_address u(v)
|
||||
# }
|
||||
# if( !dependent_slice_segment_flag ) {
|
||||
# for( i = 0; i < num_extra_slice_header_bits; i++ )
|
||||
# slice_reserved_flag[ i ] u(1)
|
||||
# slice_type ue(v)
|
||||
# ...
|
||||
|
||||
rbsp_start = nal_unit_start + NAL_UNIT_START_CODE_SIZE + NAL_UNIT_HEADER_SIZE
|
||||
skip_bits = 0
|
||||
|
||||
# 7.4.7.1 General slice segment header semantics
|
||||
# first_slice_segment_in_pic_flag equal to 1 specifies that the slice segment is the first slice segment of the picture in
|
||||
# decoding order. first_slice_segment_in_pic_flag equal to 0 specifies that the slice segment is not the first slice segment
|
||||
# of the picture in decoding order.
|
||||
is_first_slice = dat[rbsp_start] >> 7 & 1 == 1
|
||||
if not is_first_slice:
|
||||
# TODO: parse dependent_slice_segment_flag and slice_segment_address and get real slice_type
|
||||
# for now, since we don't use it, return -1 for slice_type
|
||||
return (-1, is_first_slice)
|
||||
skip_bits += 1 # skip past first_slice_segment_in_pic_flag
|
||||
|
||||
if nal_unit_type >= HevcNalUnitType.BLA_W_LP and nal_unit_type <= HevcNalUnitType.RSV_IRAP_VCL23:
|
||||
# 7.4.7.1 General slice segment header semantics
|
||||
# no_output_of_prior_pics_flag affects the output of previously-decoded pictures in the decoded picture buffer after the
|
||||
# decoding of an IDR or a BLA picture that is not the first picture in the bitstream as specified in Annex C.
|
||||
skip_bits += 1 # skip past no_output_of_prior_pics_flag
|
||||
|
||||
# 7.4.7.1 General slice segment header semantics
|
||||
# slice_pic_parameter_set_id specifies the value of pps_pic_parameter_set_id for the PPS in use.
|
||||
# The value of slice_pic_parameter_set_id shall be in the range of 0 to 63, inclusive.
|
||||
_, size = get_ue(dat, rbsp_start, skip_bits)
|
||||
skip_bits += size # skip past slice_pic_parameter_set_id
|
||||
|
||||
# 7.4.3.3.1 General picture parameter set RBSP semantics
|
||||
# num_extra_slice_header_bits specifies the number of extra slice header bits that are present in the slice header RBSP
|
||||
# for coded pictures referring to the PPS. The value of num_extra_slice_header_bits shall be in the range of 0 to 2, inclusive,
|
||||
# in bitstreams conforming to this version of this Specification. Other values for num_extra_slice_header_bits are reserved
|
||||
# for future use by ITU-T | ISO/IEC. However, decoders shall allow num_extra_slice_header_bits to have any value.
|
||||
# TODO: get from PPS_NUT pic_parameter_set_rbsp( ) for corresponding slice_pic_parameter_set_id
|
||||
num_extra_slice_header_bits = 0
|
||||
skip_bits += num_extra_slice_header_bits
|
||||
|
||||
# 7.4.7.1 General slice segment header semantics
|
||||
# slice_type specifies the coding type of the slice according to Table 7-7.
|
||||
# Table 7-7 - Name association to slice_type
|
||||
# slice_type | Name of slice_type
|
||||
# 0 | B (B slice)
|
||||
# 1 | P (P slice)
|
||||
# 2 | I (I slice)
|
||||
# unsigned integer 0-th order Exp-Golomb-coded syntax element with the left bit first
|
||||
slice_type, _ = get_ue(dat, rbsp_start, skip_bits)
|
||||
if DEBUG:
|
||||
print(" slice_type:", slice_type, f"(first slice: {is_first_slice})")
|
||||
if slice_type > 2:
|
||||
raise VideoFileInvalid("slice_type must be 0, 1, or 2")
|
||||
return slice_type, is_first_slice
|
||||
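# Editor's note (illustrative): hevc_index() below stores these slice_type values as-is,
# so an index entry of (2, offset) marks an I slice (a seekable keyframe) starting at
# byte `offset` of the .hevc file, while 0 and 1 mark B and P slices respectively.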
|
||||
def hevc_index(hevc_file_name: str, allow_corrupt: bool=False) -> Tuple[list, int, bytes]:
|
||||
with FileReader(hevc_file_name) as f:
|
||||
dat = f.read()
|
||||
|
||||
if len(dat) < NAL_UNIT_START_CODE_SIZE + 1:
|
||||
raise VideoFileInvalid("data is too short")
|
||||
|
||||
if dat[0] != 0x00:
|
||||
raise VideoFileInvalid("first byte must be 0x00")
|
||||
|
||||
prefix_dat = b""
|
||||
frame_types = list()
|
||||
|
||||
i = 1 # skip past first byte 0x00
|
||||
try:
|
||||
while i < len(dat):
|
||||
require_nal_unit_start(dat, i)
|
||||
nal_unit_len = get_hevc_nal_unit_length(dat, i)
|
||||
nal_unit_type = get_hevc_nal_unit_type(dat, i)
|
||||
if nal_unit_type in HEVC_PARAMETER_SET_NAL_UNITS:
|
||||
prefix_dat += dat[i:i+nal_unit_len]
|
||||
elif nal_unit_type in HEVC_CODED_SLICE_SEGMENT_NAL_UNITS:
|
||||
slice_type, is_first_slice = get_hevc_slice_type(dat, i, nal_unit_type)
|
||||
if is_first_slice:
|
||||
frame_types.append((slice_type, i))
|
||||
i += nal_unit_len
|
||||
except Exception as e:
|
||||
if not allow_corrupt:
|
||||
raise
|
||||
print(f"ERROR: NAL unit skipped @ {i}\n", str(e))
|
||||
|
||||
return frame_types, len(dat), prefix_dat
|
||||
|
||||
def main() -> None:
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("input_file", type=str)
|
||||
parser.add_argument("output_prefix_file", type=str)
|
||||
parser.add_argument("output_index_file", type=str)
|
||||
args = parser.parse_args()
|
||||
|
||||
frame_types, dat_len, prefix_dat = hevc_index(args.input_file)
|
||||
with open(args.output_prefix_file, "wb") as f:
|
||||
f.write(prefix_dat)
|
||||
|
||||
with open(args.output_index_file, "wb") as f:
|
||||
for ft, fp in frame_types:
|
||||
f.write(struct.pack("<II", ft, fp))
|
||||
f.write(struct.pack("<II", 0xFFFFFFFF, dat_len))
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
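# Editor's sketch (not part of the original file): reading back the index that main()
# writes above. Each record is a little-endian "<II" pair of (slice_type, byte offset
# into the .hevc file); the final record uses 0xFFFFFFFF as a sentinel slice_type and
# stores the total data length. The helper name is hypothetical.
def read_hevc_index_example(index_file_name: str) -> Tuple[list, int]:
  frame_types = []
  dat_len = 0
  with open(index_file_name, "rb") as f:
    while True:
      rec = f.read(8)
      if len(rec) < 8:
        break
      slice_type, offset = struct.unpack("<II", rec)
      if slice_type == 0xFFFFFFFF:
        dat_len = offset  # sentinel record carries the total length of the source data
        break
      frame_types.append((slice_type, offset))
  return frame_types, dat_len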
103
tools/replay/camera.cc
Normal file
@@ -0,0 +1,103 @@
|
||||
#include "tools/replay/camera.h"
|
||||
|
||||
#include <cassert>
|
||||
#include <tuple>
|
||||
|
||||
#include "third_party/linux/include/msm_media_info.h"
|
||||
#include "tools/replay/util.h"
|
||||
|
||||
std::tuple<size_t, size_t, size_t> get_nv12_info(int width, int height) {
|
||||
int nv12_width = VENUS_Y_STRIDE(COLOR_FMT_NV12, width);
|
||||
int nv12_height = VENUS_Y_SCANLINES(COLOR_FMT_NV12, height);
|
||||
assert(nv12_width == VENUS_UV_STRIDE(COLOR_FMT_NV12, width));
|
||||
assert(nv12_height / 2 == VENUS_UV_SCANLINES(COLOR_FMT_NV12, height));
|
||||
size_t nv12_buffer_size = 2346 * nv12_width; // comes from v4l2_format.fmt.pix_mp.plane_fmt[0].sizeimage
|
||||
return {nv12_width, nv12_height, nv12_buffer_size};
|
||||
}
|
||||
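// Editor's note (illustrative, not in the original file): the values above describe an
// NV12 buffer laid out as a Y plane of nv12_width * nv12_height bytes followed by
// interleaved UV rows with the same stride; startVipcServer() below passes
// nv12_width * nv12_height as the UV offset when creating the VisionIPC buffers.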
|
||||
CameraServer::CameraServer(std::pair<int, int> camera_size[MAX_CAMERAS]) {
|
||||
for (int i = 0; i < MAX_CAMERAS; ++i) {
|
||||
std::tie(cameras_[i].width, cameras_[i].height) = camera_size[i];
|
||||
}
|
||||
startVipcServer();
|
||||
}
|
||||
|
||||
CameraServer::~CameraServer() {
|
||||
for (auto &cam : cameras_) {
|
||||
if (cam.thread.joinable()) {
|
||||
cam.queue.push({});
|
||||
cam.thread.join();
|
||||
}
|
||||
}
|
||||
vipc_server_.reset(nullptr);
|
||||
}
|
||||
|
||||
void CameraServer::startVipcServer() {
|
||||
vipc_server_.reset(new VisionIpcServer("camerad"));
|
||||
for (auto &cam : cameras_) {
|
||||
if (cam.width > 0 && cam.height > 0) {
|
||||
rInfo("camera[%d] frame size %dx%d", cam.type, cam.width, cam.height);
|
||||
auto [nv12_width, nv12_height, nv12_buffer_size] = get_nv12_info(cam.width, cam.height);
|
||||
vipc_server_->create_buffers_with_sizes(cam.stream_type, YUV_BUFFER_COUNT, false, cam.width, cam.height,
|
||||
nv12_buffer_size, nv12_width, nv12_width * nv12_height);
|
||||
if (!cam.thread.joinable()) {
|
||||
cam.thread = std::thread(&CameraServer::cameraThread, this, std::ref(cam));
|
||||
}
|
||||
}
|
||||
}
|
||||
vipc_server_->start_listener();
|
||||
}
|
||||
|
||||
void CameraServer::cameraThread(Camera &cam) {
|
||||
auto read_frame = [&](FrameReader *fr, int frame_id) {
|
||||
VisionBuf *yuv_buf = vipc_server_->get_buffer(cam.stream_type);
|
||||
assert(yuv_buf);
|
||||
bool ret = fr->get(frame_id, yuv_buf);
|
||||
return ret ? yuv_buf : nullptr;
|
||||
};
|
||||
|
||||
while (true) {
|
||||
const auto [fr, eidx] = cam.queue.pop();
|
||||
if (!fr) break;
|
||||
|
||||
const int id = eidx.getSegmentId();
|
||||
bool prefetched = (id == cam.cached_id && eidx.getSegmentNum() == cam.cached_seg);
|
||||
auto yuv = prefetched ? cam.cached_buf : read_frame(fr, id);
|
||||
if (yuv) {
|
||||
VisionIpcBufExtra extra = {
|
||||
.frame_id = eidx.getFrameId(),
|
||||
.timestamp_sof = eidx.getTimestampSof(),
|
||||
.timestamp_eof = eidx.getTimestampEof(),
|
||||
};
|
||||
yuv->set_frame_id(eidx.getFrameId());
|
||||
vipc_server_->send(yuv, &extra);
|
||||
} else {
|
||||
rError("camera[%d] failed to get frame: %lu", cam.type, eidx.getSegmentId());
|
||||
}
|
||||
|
||||
cam.cached_id = id + 1;
|
||||
cam.cached_seg = eidx.getSegmentNum();
|
||||
cam.cached_buf = read_frame(fr, cam.cached_id);
|
||||
|
||||
--publishing_;
|
||||
}
|
||||
}
|
||||
|
||||
void CameraServer::pushFrame(CameraType type, FrameReader *fr, const cereal::EncodeIndex::Reader &eidx) {
|
||||
auto &cam = cameras_[type];
|
||||
if (cam.width != fr->width || cam.height != fr->height) {
|
||||
cam.width = fr->width;
|
||||
cam.height = fr->height;
|
||||
waitForSent();
|
||||
startVipcServer();
|
||||
}
|
||||
|
||||
++publishing_;
|
||||
cam.queue.push({fr, eidx});
|
||||
}
|
||||
|
||||
void CameraServer::waitForSent() {
|
||||
while (publishing_ > 0) {
|
||||
std::this_thread::yield();
|
||||
}
|
||||
}
|
||||
45
tools/replay/camera.h
Normal file
@@ -0,0 +1,45 @@
|
||||
#pragma once
|
||||
|
||||
#include <unistd.h>
|
||||
|
||||
#include <memory>
|
||||
#include <tuple>
|
||||
#include <utility>
|
||||
|
||||
#include "cereal/visionipc/visionipc_server.h"
|
||||
#include "common/queue.h"
|
||||
#include "tools/replay/framereader.h"
|
||||
#include "tools/replay/logreader.h"
|
||||
|
||||
std::tuple<size_t, size_t, size_t> get_nv12_info(int width, int height);
|
||||
|
||||
class CameraServer {
|
||||
public:
|
||||
CameraServer(std::pair<int, int> camera_size[MAX_CAMERAS] = nullptr);
|
||||
~CameraServer();
|
||||
void pushFrame(CameraType type, FrameReader* fr, const cereal::EncodeIndex::Reader& eidx);
|
||||
void waitForSent();
|
||||
|
||||
protected:
|
||||
struct Camera {
|
||||
CameraType type;
|
||||
VisionStreamType stream_type;
|
||||
int width;
|
||||
int height;
|
||||
std::thread thread;
|
||||
SafeQueue<std::pair<FrameReader*, const cereal::EncodeIndex::Reader>> queue;
|
||||
int cached_id = -1;
|
||||
int cached_seg = -1;
|
||||
VisionBuf *cached_buf = nullptr;
|
||||
};
|
||||
void startVipcServer();
|
||||
void cameraThread(Camera &cam);
|
||||
|
||||
Camera cameras_[MAX_CAMERAS] = {
|
||||
{.type = RoadCam, .stream_type = VISION_STREAM_ROAD},
|
||||
{.type = DriverCam, .stream_type = VISION_STREAM_DRIVER},
|
||||
{.type = WideRoadCam, .stream_type = VISION_STREAM_WIDE_ROAD},
|
||||
};
|
||||
std::atomic<int> publishing_ = 0;
|
||||
std::unique_ptr<VisionIpcServer> vipc_server_;
|
||||
};
|
||||
374
tools/replay/consoleui.cc
Normal file
@@ -0,0 +1,374 @@
|
||||
#include "tools/replay/consoleui.h"
|
||||
|
||||
#include <initializer_list>
|
||||
#include <string>
|
||||
#include <tuple>
|
||||
#include <utility>
|
||||
|
||||
#include <QApplication>
|
||||
|
||||
#include "common/util.h"
|
||||
#include "common/version.h"
|
||||
|
||||
namespace {
|
||||
|
||||
const int BORDER_SIZE = 3;
|
||||
|
||||
const std::initializer_list<std::pair<std::string, std::string>> keyboard_shortcuts[] = {
|
||||
{
|
||||
{"s", "+10s"},
|
||||
{"shift+s", "-10s"},
|
||||
{"m", "+60s"},
|
||||
{"shift+m", "-60s"},
|
||||
{"space", "Pause/Resume"},
|
||||
{"e", "Next Engagement"},
|
||||
{"d", "Next Disengagement"},
|
||||
{"t", "Next User Tag"},
|
||||
{"i", "Next Info"},
|
||||
{"w", "Next Warning"},
|
||||
{"c", "Next Critical"},
|
||||
},
|
||||
{
|
||||
{"enter", "Enter seek request"},
|
||||
{"+/-", "Playback speed"},
|
||||
{"q", "Exit"},
|
||||
},
|
||||
};
|
||||
|
||||
enum Color {
|
||||
Default,
|
||||
Debug,
|
||||
Yellow,
|
||||
Green,
|
||||
Red,
|
||||
Cyan,
|
||||
BrightWhite,
|
||||
Engaged,
|
||||
Disengaged,
|
||||
};
|
||||
|
||||
void add_str(WINDOW *w, const char *str, Color color = Color::Default, bool bold = false) {
|
||||
if (color != Color::Default) wattron(w, COLOR_PAIR(color));
|
||||
if (bold) wattron(w, A_BOLD);
|
||||
waddstr(w, str);
|
||||
if (bold) wattroff(w, A_BOLD);
|
||||
if (color != Color::Default) wattroff(w, COLOR_PAIR(color));
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
ConsoleUI::ConsoleUI(Replay *replay, QObject *parent) : replay(replay), sm({"carState", "liveParameters"}), QObject(parent) {
|
||||
// Initialize curses
|
||||
initscr();
|
||||
clear();
|
||||
curs_set(false);
|
||||
cbreak(); // Line buffering disabled; pass on everything
|
||||
noecho();
|
||||
keypad(stdscr, true);
|
||||
nodelay(stdscr, true); // non-blocking getchar()
|
||||
|
||||
// Initialize all the colors. https://www.ditig.com/256-colors-cheat-sheet
|
||||
start_color();
|
||||
init_pair(Color::Debug, 246, COLOR_BLACK); // #949494
|
||||
init_pair(Color::Yellow, 184, COLOR_BLACK);
|
||||
init_pair(Color::Red, COLOR_RED, COLOR_BLACK);
|
||||
init_pair(Color::Cyan, COLOR_CYAN, COLOR_BLACK);
|
||||
init_pair(Color::BrightWhite, 15, COLOR_BLACK);
|
||||
init_pair(Color::Disengaged, COLOR_BLUE, COLOR_BLUE);
|
||||
init_pair(Color::Engaged, 28, 28);
|
||||
init_pair(Color::Green, 34, COLOR_BLACK);
|
||||
|
||||
initWindows();
|
||||
|
||||
qRegisterMetaType<uint64_t>("uint64_t");
|
||||
qRegisterMetaType<ReplyMsgType>("ReplyMsgType");
|
||||
installMessageHandler([this](ReplyMsgType type, const std::string msg) {
|
||||
emit logMessageSignal(type, QString::fromStdString(msg));
|
||||
});
|
||||
installDownloadProgressHandler([this](uint64_t cur, uint64_t total, bool success) {
|
||||
emit updateProgressBarSignal(cur, total, success);
|
||||
});
|
||||
|
||||
QObject::connect(replay, &Replay::streamStarted, this, &ConsoleUI::updateSummary);
|
||||
QObject::connect(&notifier, SIGNAL(activated(int)), SLOT(readyRead()));
|
||||
QObject::connect(this, &ConsoleUI::updateProgressBarSignal, this, &ConsoleUI::updateProgressBar);
|
||||
QObject::connect(this, &ConsoleUI::logMessageSignal, this, &ConsoleUI::logMessage);
|
||||
|
||||
sm_timer.callOnTimeout(this, &ConsoleUI::updateStatus);
|
||||
sm_timer.start(100);
|
||||
getch_timer.start(1000, this);
|
||||
readyRead();
|
||||
}
|
||||
|
||||
ConsoleUI::~ConsoleUI() {
|
||||
endwin();
|
||||
}
|
||||
|
||||
void ConsoleUI::initWindows() {
|
||||
getmaxyx(stdscr, max_height, max_width);
|
||||
w.fill(nullptr);
|
||||
w[Win::Title] = newwin(1, max_width, 0, 0);
|
||||
w[Win::Stats] = newwin(2, max_width - 2 * BORDER_SIZE, 2, BORDER_SIZE);
|
||||
w[Win::Timeline] = newwin(4, max_width - 2 * BORDER_SIZE, 5, BORDER_SIZE);
|
||||
w[Win::TimelineDesc] = newwin(1, 100, 10, BORDER_SIZE);
|
||||
w[Win::CarState] = newwin(3, 100, 12, BORDER_SIZE);
|
||||
w[Win::DownloadBar] = newwin(1, 100, 16, BORDER_SIZE);
|
||||
if (int log_height = max_height - 27; log_height > 4) {
|
||||
w[Win::LogBorder] = newwin(log_height, max_width - 2 * (BORDER_SIZE - 1), 17, BORDER_SIZE - 1);
|
||||
box(w[Win::LogBorder], 0, 0);
|
||||
w[Win::Log] = newwin(log_height - 2, max_width - 2 * BORDER_SIZE, 18, BORDER_SIZE);
|
||||
scrollok(w[Win::Log], true);
|
||||
}
|
||||
w[Win::Help] = newwin(5, max_width - (2 * BORDER_SIZE), max_height - 6, BORDER_SIZE);
|
||||
|
||||
// set the title bar
|
||||
wbkgd(w[Win::Title], A_REVERSE);
|
||||
mvwprintw(w[Win::Title], 0, 3, "openpilot replay %s", COMMA_VERSION);
|
||||
|
||||
// show windows on the real screen
|
||||
refresh();
|
||||
displayTimelineDesc();
|
||||
displayHelp();
|
||||
updateSummary();
|
||||
updateTimeline();
|
||||
for (auto win : w) {
|
||||
if (win) wrefresh(win);
|
||||
}
|
||||
}
|
||||
|
||||
void ConsoleUI::timerEvent(QTimerEvent *ev) {
|
||||
if (ev->timerId() != getch_timer.timerId()) return;
|
||||
|
||||
if (is_term_resized(max_height, max_width)) {
|
||||
for (auto win : w) {
|
||||
if (win) delwin(win);
|
||||
}
|
||||
endwin();
|
||||
clear();
|
||||
refresh();
|
||||
initWindows();
|
||||
rWarning("resize term %dx%d", max_height, max_width);
|
||||
}
|
||||
updateTimeline();
|
||||
}
|
||||
|
||||
void ConsoleUI::updateStatus() {
|
||||
auto write_item = [this](int y, int x, const char *key, const std::string &value, const std::string &unit,
|
||||
bool bold = false, Color color = Color::BrightWhite) {
|
||||
auto win = w[Win::CarState];
|
||||
wmove(win, y, x);
|
||||
add_str(win, key);
|
||||
add_str(win, value.c_str(), color, bold);
|
||||
add_str(win, unit.c_str());
|
||||
};
|
||||
static const std::pair<const char *, Color> status_text[] = {
|
||||
{"loading...", Color::Red},
|
||||
{"playing", Color::Green},
|
||||
{"paused...", Color::Yellow},
|
||||
};
|
||||
|
||||
sm.update(0);
|
||||
|
||||
if (status != Status::Paused) {
|
||||
auto events = replay->events();
|
||||
uint64_t current_mono_time = replay->routeStartTime() + replay->currentSeconds() * 1e9;
|
||||
bool playing = !events->empty() && events->back()->mono_time > current_mono_time;
|
||||
status = playing ? Status::Playing : Status::Waiting;
|
||||
}
|
||||
auto [status_str, status_color] = status_text[status];
|
||||
write_item(0, 0, "STATUS: ", status_str, " ", false, status_color);
|
||||
std::string current_segment = " - " + std::to_string((int)(replay->currentSeconds() / 60));
|
||||
write_item(0, 25, "TIME: ", replay->currentDateTime().toString("ddd MMMM dd hh:mm:ss").toStdString(), current_segment, true);
|
||||
|
||||
auto p = sm["liveParameters"].getLiveParameters();
|
||||
write_item(1, 0, "STIFFNESS: ", util::string_format("%.2f %%", p.getStiffnessFactor() * 100), " ");
|
||||
write_item(1, 25, "SPEED: ", util::string_format("%.2f", sm["carState"].getCarState().getVEgo()), " m/s");
|
||||
write_item(2, 0, "STEER RATIO: ", util::string_format("%.2f", p.getSteerRatio()), "");
|
||||
auto angle_offsets = util::string_format("%.2f|%.2f", p.getAngleOffsetAverageDeg(), p.getAngleOffsetDeg());
|
||||
write_item(2, 25, "ANGLE OFFSET(AVG|INSTANT): ", angle_offsets, " deg");
|
||||
|
||||
wrefresh(w[Win::CarState]);
|
||||
}
|
||||
|
||||
void ConsoleUI::displayHelp() {
|
||||
for (int i = 0; i < std::size(keyboard_shortcuts); ++i) {
|
||||
wmove(w[Win::Help], i * 2, 0);
|
||||
for (auto &[key, desc] : keyboard_shortcuts[i]) {
|
||||
wattron(w[Win::Help], A_REVERSE);
|
||||
waddstr(w[Win::Help], (' ' + key + ' ').c_str());
|
||||
wattroff(w[Win::Help], A_REVERSE);
|
||||
waddstr(w[Win::Help], (' ' + desc + ' ').c_str());
|
||||
}
|
||||
}
|
||||
wrefresh(w[Win::Help]);
|
||||
}
|
||||
|
||||
void ConsoleUI::displayTimelineDesc() {
|
||||
std::tuple<Color, const char *, bool> indicators[]{
|
||||
{Color::Engaged, " Engaged ", false},
|
||||
{Color::Disengaged, " Disengaged ", false},
|
||||
{Color::Green, " Info ", true},
|
||||
{Color::Yellow, " Warning ", true},
|
||||
{Color::Red, " Critical ", true},
|
||||
{Color::Cyan, " User Tag ", true},
|
||||
};
|
||||
for (auto [color, name, bold] : indicators) {
|
||||
add_str(w[Win::TimelineDesc], "__", color, bold);
|
||||
add_str(w[Win::TimelineDesc], name);
|
||||
}
|
||||
}
|
||||
|
||||
void ConsoleUI::logMessage(ReplyMsgType type, const QString &msg) {
|
||||
if (auto win = w[Win::Log]) {
|
||||
Color color = Color::Default;
|
||||
if (type == ReplyMsgType::Debug) {
|
||||
color = Color::Debug;
|
||||
} else if (type == ReplyMsgType::Warning) {
|
||||
color = Color::Yellow;
|
||||
} else if (type == ReplyMsgType::Critical) {
|
||||
color = Color::Red;
|
||||
}
|
||||
add_str(win, qPrintable(msg + "\n"), color);
|
||||
wrefresh(win);
|
||||
}
|
||||
}
|
||||
|
||||
void ConsoleUI::updateProgressBar(uint64_t cur, uint64_t total, bool success) {
|
||||
werase(w[Win::DownloadBar]);
|
||||
if (success && cur < total) {
|
||||
const int width = 35;
|
||||
const float progress = cur / (double)total;
|
||||
const int pos = width * progress;
|
||||
wprintw(w[Win::DownloadBar], "Downloading [%s>%s] %d%% %s", std::string(pos, '=').c_str(),
|
||||
std::string(width - pos, ' ').c_str(), int(progress * 100.0), formattedDataSize(total).c_str());
|
||||
}
|
||||
wrefresh(w[Win::DownloadBar]);
|
||||
}
|
||||
|
||||
void ConsoleUI::updateSummary() {
|
||||
const auto &route = replay->route();
|
||||
mvwprintw(w[Win::Stats], 0, 0, "Route: %s, %lu segments", qPrintable(route->name()), route->segments().size());
|
||||
mvwprintw(w[Win::Stats], 1, 0, "Car Fingerprint: %s", replay->carFingerprint().c_str());
|
||||
wrefresh(w[Win::Stats]);
|
||||
}
|
||||
|
||||
void ConsoleUI::updateTimeline() {
|
||||
auto win = w[Win::Timeline];
|
||||
int width = getmaxx(win);
|
||||
werase(win);
|
||||
|
||||
wattron(win, COLOR_PAIR(Color::Disengaged));
|
||||
mvwhline(win, 1, 0, ' ', width);
|
||||
mvwhline(win, 2, 0, ' ', width);
|
||||
wattroff(win, COLOR_PAIR(Color::Disengaged));
|
||||
|
||||
const int total_sec = replay->totalSeconds();
|
||||
for (auto [begin, end, type] : replay->getTimeline()) {
|
||||
int start_pos = (begin / total_sec) * width;
|
||||
int end_pos = (end / total_sec) * width;
|
||||
if (type == TimelineType::Engaged) {
|
||||
mvwchgat(win, 1, start_pos, end_pos - start_pos + 1, A_COLOR, Color::Engaged, NULL);
|
||||
mvwchgat(win, 2, start_pos, end_pos - start_pos + 1, A_COLOR, Color::Engaged, NULL);
|
||||
} else if (type == TimelineType::UserFlag) {
|
||||
mvwchgat(win, 3, start_pos, end_pos - start_pos + 1, ACS_S3, Color::Cyan, NULL);
|
||||
} else {
|
||||
auto color_id = Color::Green;
|
||||
if (type != TimelineType::AlertInfo) {
|
||||
color_id = type == TimelineType::AlertWarning ? Color::Yellow : Color::Red;
|
||||
}
|
||||
mvwchgat(win, 3, start_pos, end_pos - start_pos + 1, ACS_S3, color_id, NULL);
|
||||
}
|
||||
}
|
||||
|
||||
int cur_pos = ((double)replay->currentSeconds() / total_sec) * width;
|
||||
wattron(win, COLOR_PAIR(Color::BrightWhite));
|
||||
mvwaddch(win, 0, cur_pos, ACS_VLINE);
|
||||
mvwaddch(win, 3, cur_pos, ACS_VLINE);
|
||||
wattroff(win, COLOR_PAIR(Color::BrightWhite));
|
||||
wrefresh(win);
|
||||
}
|
||||
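// Editor's note (illustrative): positions are proportional to time. For example, on an
// 80-column timeline of a 3600 s route, an engagement spanning 600 s to 900 s maps to
// start_pos = (600 / 3600) * 80 = 13 and end_pos = 20, so columns 13 through 20 of the
// two "engaged" rows are recolored.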
|
||||
void ConsoleUI::readyRead() {
|
||||
int c;
|
||||
while ((c = getch()) != ERR) {
|
||||
handleKey(c);
|
||||
}
|
||||
}
|
||||
|
||||
void ConsoleUI::pauseReplay(bool pause) {
|
||||
replay->pause(pause);
|
||||
status = pause ? Status::Paused : Status::Waiting;
|
||||
}
|
||||
|
||||
void ConsoleUI::handleKey(char c) {
|
||||
if (c == '\n') {
|
||||
// pause the replay and switch getch() to blocking mode
|
||||
pauseReplay(true);
|
||||
updateStatus();
|
||||
getch_timer.stop();
|
||||
curs_set(true);
|
||||
nodelay(stdscr, false);
|
||||
|
||||
// Wait for user input
|
||||
rWarning("Waiting for input...");
|
||||
int y = getmaxy(stdscr) - 9;
|
||||
move(y, BORDER_SIZE);
|
||||
add_str(stdscr, "Enter seek request: ", Color::BrightWhite, true);
|
||||
refresh();
|
||||
|
||||
// Seek to choice
|
||||
echo();
|
||||
int choice = 0;
|
||||
scanw((char *)"%d", &choice);
|
||||
noecho();
|
||||
pauseReplay(false);
|
||||
replay->seekTo(choice, false);
|
||||
|
||||
// Clean up and turn off the blocking mode
|
||||
move(y, 0);
|
||||
clrtoeol();
|
||||
nodelay(stdscr, true);
|
||||
curs_set(false);
|
||||
refresh();
|
||||
getch_timer.start(1000, this);
|
||||
|
||||
} else if (c == '+' || c == '=') {
|
||||
auto it = std::upper_bound(speed_array.begin(), speed_array.end(), replay->getSpeed());
|
||||
if (it != speed_array.end()) {
|
||||
rWarning("playback speed: %.1fx", *it);
|
||||
replay->setSpeed(*it);
|
||||
}
|
||||
} else if (c == '_' || c == '-') {
|
||||
auto it = std::lower_bound(speed_array.begin(), speed_array.end(), replay->getSpeed());
|
||||
if (it != speed_array.begin()) {
|
||||
auto prev = std::prev(it);
|
||||
rWarning("playback speed: %.1fx", *prev);
|
||||
replay->setSpeed(*prev);
|
||||
}
|
||||
} else if (c == 'e') {
|
||||
replay->seekToFlag(FindFlag::nextEngagement);
|
||||
} else if (c == 'd') {
|
||||
replay->seekToFlag(FindFlag::nextDisEngagement);
|
||||
} else if (c == 't') {
|
||||
replay->seekToFlag(FindFlag::nextUserFlag);
|
||||
} else if (c == 'i') {
|
||||
replay->seekToFlag(FindFlag::nextInfo);
|
||||
} else if (c == 'w') {
|
||||
replay->seekToFlag(FindFlag::nextWarning);
|
||||
} else if (c == 'c') {
|
||||
replay->seekToFlag(FindFlag::nextCritical);
|
||||
} else if (c == 'm') {
|
||||
replay->seekTo(+60, true);
|
||||
} else if (c == 'M') {
|
||||
replay->seekTo(-60, true);
|
||||
} else if (c == 's') {
|
||||
replay->seekTo(+10, true);
|
||||
} else if (c == 'S') {
|
||||
replay->seekTo(-10, true);
|
||||
} else if (c == ' ') {
|
||||
pauseReplay(!replay->isPaused());
|
||||
} else if (c == 'q' || c == 'Q') {
|
||||
replay->stop();
|
||||
qApp->exit();
|
||||
}
|
||||
}
|
||||
51
tools/replay/consoleui.h
Normal file
@@ -0,0 +1,51 @@
|
||||
#pragma once
|
||||
|
||||
#include <array>
|
||||
#include <QBasicTimer>
|
||||
#include <QObject>
|
||||
#include <QSocketNotifier>
|
||||
#include <QTimer>
|
||||
#include <QTimerEvent>
|
||||
|
||||
#include "tools/replay/replay.h"
|
||||
#include <ncurses.h>
|
||||
|
||||
class ConsoleUI : public QObject {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
ConsoleUI(Replay *replay, QObject *parent = 0);
|
||||
~ConsoleUI();
|
||||
inline static const std::array speed_array = {0.2f, 0.5f, 1.0f, 2.0f, 3.0f};
|
||||
|
||||
private:
|
||||
void initWindows();
|
||||
void handleKey(char c);
|
||||
void displayHelp();
|
||||
void displayTimelineDesc();
|
||||
void updateTimeline();
|
||||
void updateSummary();
|
||||
void updateStatus();
|
||||
void pauseReplay(bool pause);
|
||||
|
||||
enum Status { Waiting, Playing, Paused };
|
||||
enum Win { Title, Stats, Log, LogBorder, DownloadBar, Timeline, TimelineDesc, Help, CarState, Max};
|
||||
std::array<WINDOW*, Win::Max> w{};
|
||||
SubMaster sm;
|
||||
Replay *replay;
|
||||
QBasicTimer getch_timer;
|
||||
QTimer sm_timer;
|
||||
QSocketNotifier notifier{0, QSocketNotifier::Read, this};
|
||||
int max_width, max_height;
|
||||
Status status = Status::Waiting;
|
||||
|
||||
signals:
|
||||
void updateProgressBarSignal(uint64_t cur, uint64_t total, bool success);
|
||||
void logMessageSignal(ReplyMsgType type, const QString &msg);
|
||||
|
||||
private slots:
|
||||
void readyRead();
|
||||
void timerEvent(QTimerEvent *ev);
|
||||
void updateProgressBar(uint64_t cur, uint64_t total, bool success);
|
||||
void logMessage(ReplyMsgType type, const QString &msg);
|
||||
};
|
||||
46
tools/replay/filereader.cc
Normal file
@@ -0,0 +1,46 @@
|
||||
#include "tools/replay/filereader.h"
|
||||
|
||||
#include <fstream>
|
||||
|
||||
#include "common/util.h"
|
||||
#include "system/hardware/hw.h"
|
||||
#include "tools/replay/util.h"
|
||||
|
||||
std::string cacheFilePath(const std::string &url) {
|
||||
static std::string cache_path = [] {
|
||||
const std::string comma_cache = Path::download_cache_root();
|
||||
util::create_directories(comma_cache, 0755);
|
||||
return comma_cache.back() == '/' ? comma_cache : comma_cache + "/";
|
||||
}();
|
||||
|
||||
return cache_path + sha256(getUrlWithoutQuery(url));
|
||||
}
|
||||
|
||||
std::string FileReader::read(const std::string &file, std::atomic<bool> *abort) {
|
||||
const bool is_remote = file.find("https://") == 0;
|
||||
const std::string local_file = is_remote ? cacheFilePath(file) : file;
|
||||
std::string result;
|
||||
|
||||
if ((!is_remote || cache_to_local_) && util::file_exists(local_file)) {
|
||||
result = util::read_file(local_file);
|
||||
} else if (is_remote) {
|
||||
result = download(file, abort);
|
||||
if (cache_to_local_ && !result.empty()) {
|
||||
std::ofstream fs(local_file, std::ios::binary | std::ios::out);
|
||||
fs.write(result.data(), result.size());
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
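// Usage sketch (editor's addition; the URL is a placeholder): with caching enabled, a
// remote file is downloaded once and then served from the local cache, which is keyed
// by the sha256 of the URL without its query string:
//   FileReader reader(true /* cache_to_local */);
//   std::string rlog = reader.read("https://example.com/some_route/0/rlog.bz2");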
|
||||
std::string FileReader::download(const std::string &url, std::atomic<bool> *abort) {
|
||||
for (int i = 0; i <= max_retries_ && !(abort && *abort); ++i) {
|
||||
if (i > 0) rWarning("download failed, retrying %d", i);
|
||||
|
||||
std::string result = httpGet(url, chunk_size_, abort);
|
||||
if (!result.empty()) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
return {};
|
||||
}
|
||||
20
tools/replay/filereader.h
Normal file
@@ -0,0 +1,20 @@
|
||||
#pragma once
|
||||
|
||||
#include <atomic>
|
||||
#include <string>
|
||||
|
||||
class FileReader {
|
||||
public:
|
||||
FileReader(bool cache_to_local, size_t chunk_size = 0, int retries = 3)
|
||||
: cache_to_local_(cache_to_local), chunk_size_(chunk_size), max_retries_(retries) {}
|
||||
virtual ~FileReader() {}
|
||||
std::string read(const std::string &file, std::atomic<bool> *abort = nullptr);
|
||||
|
||||
private:
|
||||
std::string download(const std::string &url, std::atomic<bool> *abort);
|
||||
size_t chunk_size_;
|
||||
int max_retries_;
|
||||
bool cache_to_local_;
|
||||
};
|
||||
|
||||
std::string cacheFilePath(const std::string &url);
|
||||
251
tools/replay/framereader.cc
Normal file
@@ -0,0 +1,251 @@
|
||||
#include "tools/replay/framereader.h"
|
||||
#include "tools/replay/util.h"
|
||||
|
||||
#include <cassert>
|
||||
#include <algorithm>
|
||||
#include "third_party/libyuv/include/libyuv.h"
|
||||
|
||||
#ifdef __APPLE__
|
||||
#define HW_DEVICE_TYPE AV_HWDEVICE_TYPE_VIDEOTOOLBOX
|
||||
#define HW_PIX_FMT AV_PIX_FMT_VIDEOTOOLBOX
|
||||
#else
|
||||
#define HW_DEVICE_TYPE AV_HWDEVICE_TYPE_CUDA
|
||||
#define HW_PIX_FMT AV_PIX_FMT_CUDA
|
||||
#endif
|
||||
|
||||
namespace {
|
||||
|
||||
struct buffer_data {
|
||||
const uint8_t *data;
|
||||
int64_t offset;
|
||||
size_t size;
|
||||
};
|
||||
|
||||
int readPacket(void *opaque, uint8_t *buf, int buf_size) {
|
||||
struct buffer_data *bd = (struct buffer_data *)opaque;
|
||||
assert(bd->offset <= bd->size);
|
||||
buf_size = std::min((size_t)buf_size, (size_t)(bd->size - bd->offset));
|
||||
if (!buf_size) return AVERROR_EOF;
|
||||
|
||||
memcpy(buf, bd->data + bd->offset, buf_size);
|
||||
bd->offset += buf_size;
|
||||
return buf_size;
|
||||
}
|
||||
|
||||
enum AVPixelFormat get_hw_format(AVCodecContext *ctx, const enum AVPixelFormat *pix_fmts) {
|
||||
enum AVPixelFormat *hw_pix_fmt = reinterpret_cast<enum AVPixelFormat *>(ctx->opaque);
|
||||
for (const enum AVPixelFormat *p = pix_fmts; *p != -1; p++) {
|
||||
if (*p == *hw_pix_fmt) return *p;
|
||||
}
|
||||
rWarning("Please run replay with the --no-hw-decoder flag!");
|
||||
// fallback to YUV420p
|
||||
*hw_pix_fmt = AV_PIX_FMT_NONE;
|
||||
return AV_PIX_FMT_YUV420P;
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
FrameReader::FrameReader() {
|
||||
av_log_set_level(AV_LOG_QUIET);
|
||||
}
|
||||
|
||||
FrameReader::~FrameReader() {
|
||||
for (AVPacket *pkt : packets) {
|
||||
av_packet_free(&pkt);
|
||||
}
|
||||
|
||||
if (decoder_ctx) avcodec_free_context(&decoder_ctx);
|
||||
if (input_ctx) avformat_close_input(&input_ctx);
|
||||
if (hw_device_ctx) av_buffer_unref(&hw_device_ctx);
|
||||
|
||||
if (avio_ctx_) {
|
||||
av_freep(&avio_ctx_->buffer);
|
||||
avio_context_free(&avio_ctx_);
|
||||
}
|
||||
}
|
||||
|
||||
bool FrameReader::load(const std::string &url, bool no_hw_decoder, std::atomic<bool> *abort, bool local_cache, int chunk_size, int retries) {
|
||||
FileReader f(local_cache, chunk_size, retries);
|
||||
std::string data = f.read(url, abort);
|
||||
if (data.empty()) {
|
||||
rWarning("URL %s returned no data", url.c_str());
|
||||
return false;
|
||||
}
|
||||
|
||||
return load((std::byte *)data.data(), data.size(), no_hw_decoder, abort);
|
||||
}
|
||||
|
||||
bool FrameReader::load(const std::byte *data, size_t size, bool no_hw_decoder, std::atomic<bool> *abort) {
|
||||
input_ctx = avformat_alloc_context();
|
||||
if (!input_ctx) {
|
||||
rError("Error calling avformat_alloc_context");
|
||||
return false;
|
||||
}
|
||||
|
||||
struct buffer_data bd = {
|
||||
.data = (const uint8_t*)data,
|
||||
.offset = 0,
|
||||
.size = size,
|
||||
};
|
||||
const int avio_ctx_buffer_size = 64 * 1024;
|
||||
unsigned char *avio_ctx_buffer = (unsigned char *)av_malloc(avio_ctx_buffer_size);
|
||||
avio_ctx_ = avio_alloc_context(avio_ctx_buffer, avio_ctx_buffer_size, 0, &bd, readPacket, nullptr, nullptr);
|
||||
input_ctx->pb = avio_ctx_;
|
||||
|
||||
input_ctx->probesize = 10 * 1024 * 1024; // 10MB
|
||||
int ret = avformat_open_input(&input_ctx, nullptr, nullptr, nullptr);
|
||||
if (ret != 0) {
|
||||
char err_str[1024] = {0};
|
||||
av_strerror(ret, err_str, std::size(err_str));
|
||||
rError("Error loading video - %s", err_str);
|
||||
return false;
|
||||
}
|
||||
|
||||
ret = avformat_find_stream_info(input_ctx, nullptr);
|
||||
if (ret < 0) {
|
||||
rError("cannot find a video stream in the input file");
|
||||
return false;
|
||||
}
|
||||
|
||||
AVStream *video = input_ctx->streams[0];
|
||||
const AVCodec *decoder = avcodec_find_decoder(video->codecpar->codec_id);
|
||||
if (!decoder) return false;
|
||||
|
||||
decoder_ctx = avcodec_alloc_context3(decoder);
|
||||
ret = avcodec_parameters_to_context(decoder_ctx, video->codecpar);
|
||||
if (ret != 0) return false;
|
||||
|
||||
width = (decoder_ctx->width + 3) & ~3;
|
||||
height = decoder_ctx->height;
|
||||
|
||||
if (has_hw_decoder && !no_hw_decoder) {
|
||||
if (!initHardwareDecoder(HW_DEVICE_TYPE)) {
|
||||
rWarning("No device with hardware decoder found. fallback to CPU decoding.");
|
||||
}
|
||||
}
|
||||
|
||||
ret = avcodec_open2(decoder_ctx, decoder, nullptr);
|
||||
if (ret < 0) {
|
||||
rError("avcodec_open2 failed %d", ret);
|
||||
return false;
|
||||
}
|
||||
|
||||
packets.reserve(60 * 20); // 20fps, one minute
|
||||
while (!(abort && *abort)) {
|
||||
AVPacket *pkt = av_packet_alloc();
|
||||
ret = av_read_frame(input_ctx, pkt);
|
||||
if (ret < 0) {
|
||||
av_packet_free(&pkt);
|
||||
valid_ = (ret == AVERROR_EOF);
|
||||
break;
|
||||
}
|
||||
packets.push_back(pkt);
|
||||
// some streams seem to contain no keyframes
|
||||
key_frames_count_ += pkt->flags & AV_PKT_FLAG_KEY;
|
||||
}
|
||||
valid_ = valid_ && !packets.empty();
|
||||
return valid_;
|
||||
}
|
||||
|
||||
bool FrameReader::initHardwareDecoder(AVHWDeviceType hw_device_type) {
|
||||
for (int i = 0;; i++) {
|
||||
const AVCodecHWConfig *config = avcodec_get_hw_config(decoder_ctx->codec, i);
|
||||
if (!config) {
|
||||
rWarning("decoder %s does not support hw device type %s.", decoder_ctx->codec->name,
|
||||
av_hwdevice_get_type_name(hw_device_type));
|
||||
return false;
|
||||
}
|
||||
if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX && config->device_type == hw_device_type) {
|
||||
hw_pix_fmt = config->pix_fmt;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
int ret = av_hwdevice_ctx_create(&hw_device_ctx, hw_device_type, nullptr, nullptr, 0);
|
||||
if (ret < 0) {
|
||||
hw_pix_fmt = AV_PIX_FMT_NONE;
|
||||
has_hw_decoder = false;
|
||||
rWarning("Failed to create specified HW device %d.", ret);
|
||||
return false;
|
||||
}
|
||||
|
||||
decoder_ctx->hw_device_ctx = av_buffer_ref(hw_device_ctx);
|
||||
decoder_ctx->opaque = &hw_pix_fmt;
|
||||
decoder_ctx->get_format = get_hw_format;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool FrameReader::get(int idx, VisionBuf *buf) {
|
||||
assert(buf != nullptr);
|
||||
if (!valid_ || idx < 0 || idx >= packets.size()) {
|
||||
return false;
|
||||
}
|
||||
return decode(idx, buf);
|
||||
}
|
||||
|
||||
bool FrameReader::decode(int idx, VisionBuf *buf) {
|
||||
int from_idx = idx;
|
||||
if (idx != prev_idx + 1 && key_frames_count_ > 1) {
|
||||
// seeking to the nearest key frame
|
||||
for (int i = idx; i >= 0; --i) {
|
||||
if (packets[i]->flags & AV_PKT_FLAG_KEY) {
|
||||
from_idx = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
prev_idx = idx;
|
||||
|
||||
for (int i = from_idx; i <= idx; ++i) {
|
||||
AVFrame *f = decodeFrame(packets[i]);
|
||||
if (f && i == idx) {
|
||||
return copyBuffers(f, buf);
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
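// Editor's note (illustrative): random access decodes forward from the nearest preceding
// keyframe. With keyframes at packets 0 and 300, get(310, buf) decodes packets 300..310
// and copies only the final frame into the caller's buffer, while a subsequent
// get(311, buf) decodes just packet 311 because prev_idx tracks the last decoded index.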
|
||||
AVFrame *FrameReader::decodeFrame(AVPacket *pkt) {
|
||||
int ret = avcodec_send_packet(decoder_ctx, pkt);
|
||||
if (ret < 0) {
|
||||
rError("Error sending a packet for decoding: %d", ret);
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
av_frame_.reset(av_frame_alloc());
|
||||
ret = avcodec_receive_frame(decoder_ctx, av_frame_.get());
|
||||
if (ret != 0) {
|
||||
rError("avcodec_receive_frame error: %d", ret);
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (av_frame_->format == hw_pix_fmt) {
|
||||
hw_frame.reset(av_frame_alloc());
|
||||
if ((ret = av_hwframe_transfer_data(hw_frame.get(), av_frame_.get(), 0)) < 0) {
|
||||
rError("error transferring the data from GPU to CPU");
|
||||
return nullptr;
|
||||
}
|
||||
return hw_frame.get();
|
||||
} else {
|
||||
return av_frame_.get();
|
||||
}
|
||||
}
|
||||
|
||||
bool FrameReader::copyBuffers(AVFrame *f, VisionBuf *buf) {
|
||||
assert(f != nullptr && buf != nullptr);
|
||||
if (hw_pix_fmt == HW_PIX_FMT) {
|
||||
for (int i = 0; i < height/2; i++) {
|
||||
memcpy(buf->y + (i*2 + 0)*buf->stride, f->data[0] + (i*2 + 0)*f->linesize[0], width);
|
||||
memcpy(buf->y + (i*2 + 1)*buf->stride, f->data[0] + (i*2 + 1)*f->linesize[0], width);
|
||||
memcpy(buf->uv + i*buf->stride, f->data[1] + i*f->linesize[1], width);
|
||||
}
|
||||
} else {
|
||||
libyuv::I420ToNV12(f->data[0], f->linesize[0],
|
||||
f->data[1], f->linesize[1],
|
||||
f->data[2], f->linesize[2],
|
||||
buf->y, buf->stride,
|
||||
buf->uv, buf->stride,
|
||||
width, height);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
51
tools/replay/framereader.h
Normal file
@@ -0,0 +1,51 @@
|
||||
#pragma once
|
||||
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "cereal/visionipc/visionbuf.h"
|
||||
#include "tools/replay/filereader.h"
|
||||
|
||||
extern "C" {
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <libavformat/avformat.h>
|
||||
}
|
||||
|
||||
struct AVFrameDeleter {
|
||||
void operator()(AVFrame* frame) const { av_frame_free(&frame); }
|
||||
};
|
||||
|
||||
class FrameReader {
|
||||
public:
|
||||
FrameReader();
|
||||
~FrameReader();
|
||||
bool load(const std::string &url, bool no_hw_decoder = false, std::atomic<bool> *abort = nullptr, bool local_cache = false,
|
||||
int chunk_size = -1, int retries = 0);
|
||||
bool load(const std::byte *data, size_t size, bool no_hw_decoder = false, std::atomic<bool> *abort = nullptr);
|
||||
bool get(int idx, VisionBuf *buf);
|
||||
int getYUVSize() const { return width * height * 3 / 2; }
|
||||
size_t getFrameCount() const { return packets.size(); }
|
||||
bool valid() const { return valid_; }
|
||||
|
||||
int width = 0, height = 0;
|
||||
|
||||
private:
|
||||
bool initHardwareDecoder(AVHWDeviceType hw_device_type);
|
||||
bool decode(int idx, VisionBuf *buf);
|
||||
AVFrame * decodeFrame(AVPacket *pkt);
|
||||
bool copyBuffers(AVFrame *f, VisionBuf *buf);
|
||||
|
||||
std::vector<AVPacket*> packets;
|
||||
std::unique_ptr<AVFrame, AVFrameDeleter> av_frame_, hw_frame;
|
||||
AVFormatContext *input_ctx = nullptr;
|
||||
AVCodecContext *decoder_ctx = nullptr;
|
||||
int key_frames_count_ = 0;
|
||||
bool valid_ = false;
|
||||
AVIOContext *avio_ctx_ = nullptr;
|
||||
|
||||
AVPixelFormat hw_pix_fmt = AV_PIX_FMT_NONE;
|
||||
AVBufferRef *hw_device_ctx = nullptr;
|
||||
int prev_idx = -1;
|
||||
inline static std::atomic<bool> has_hw_decoder = true;
|
||||
};
|
||||
98
tools/replay/logreader.cc
Normal file
@@ -0,0 +1,98 @@
|
||||
#include "tools/replay/logreader.h"
|
||||
|
||||
#include <algorithm>
|
||||
#include "tools/replay/filereader.h"
|
||||
#include "tools/replay/util.h"
|
||||
|
||||
Event::Event(const kj::ArrayPtr<const capnp::word> &amsg, bool frame) : reader(amsg), frame(frame) {
|
||||
words = kj::ArrayPtr<const capnp::word>(amsg.begin(), reader.getEnd());
|
||||
event = reader.getRoot<cereal::Event>();
|
||||
which = event.which();
|
||||
mono_time = event.getLogMonoTime();
|
||||
|
||||
// 1) Send video data at t=timestampEof/timestampSof
|
||||
// 2) Send encodeIndex packet at t=logMonoTime
|
||||
if (frame) {
|
||||
auto idx = capnp::AnyStruct::Reader(event).getPointerSection()[0].getAs<cereal::EncodeIndex>();
|
||||
// C2 only has eof set, and some older routes have neither
|
||||
uint64_t sof = idx.getTimestampSof();
|
||||
uint64_t eof = idx.getTimestampEof();
|
||||
if (sof > 0) {
|
||||
mono_time = sof;
|
||||
} else if (eof > 0) {
|
||||
mono_time = eof;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// class LogReader
|
||||
|
||||
LogReader::LogReader(size_t memory_pool_block_size) {
|
||||
#ifdef HAS_MEMORY_RESOURCE
|
||||
const size_t buf_size = sizeof(Event) * memory_pool_block_size;
|
||||
mbr_ = std::make_unique<std::pmr::monotonic_buffer_resource>(buf_size);
|
||||
#endif
|
||||
events.reserve(memory_pool_block_size);
|
||||
}
|
||||
|
||||
LogReader::~LogReader() {
|
||||
for (Event *e : events) {
|
||||
delete e;
|
||||
}
|
||||
}
|
||||
|
||||
bool LogReader::load(const std::string &url, std::atomic<bool> *abort, bool local_cache, int chunk_size, int retries) {
|
||||
raw_ = FileReader(local_cache, chunk_size, retries).read(url, abort);
|
||||
if (raw_.empty()) return false;
|
||||
|
||||
if (url.find(".bz2") != std::string::npos) {
|
||||
raw_ = decompressBZ2(raw_, abort);
|
||||
if (raw_.empty()) return false;
|
||||
}
|
||||
return parse(abort);
|
||||
}
|
||||
|
||||
bool LogReader::load(const std::byte *data, size_t size, std::atomic<bool> *abort) {
|
||||
raw_.assign((const char *)data, size);
|
||||
return parse(abort);
|
||||
}
|
||||
|
||||
bool LogReader::parse(std::atomic<bool> *abort) {
|
||||
try {
|
||||
kj::ArrayPtr<const capnp::word> words((const capnp::word *)raw_.data(), raw_.size() / sizeof(capnp::word));
|
||||
while (words.size() > 0 && !(abort && *abort)) {
|
||||
#ifdef HAS_MEMORY_RESOURCE
|
||||
Event *evt = new (mbr_.get()) Event(words);
|
||||
#else
|
||||
Event *evt = new Event(words);
|
||||
#endif
|
||||
// Add encodeIdx packet again as a frame packet for the video stream
|
||||
if (evt->which == cereal::Event::ROAD_ENCODE_IDX ||
|
||||
evt->which == cereal::Event::DRIVER_ENCODE_IDX ||
|
||||
evt->which == cereal::Event::WIDE_ROAD_ENCODE_IDX) {
|
||||
|
||||
#ifdef HAS_MEMORY_RESOURCE
|
||||
Event *frame_evt = new (mbr_.get()) Event(words, true);
|
||||
#else
|
||||
Event *frame_evt = new Event(words, true);
|
||||
#endif
|
||||
|
||||
events.push_back(frame_evt);
|
||||
}
|
||||
|
||||
words = kj::arrayPtr(evt->reader.getEnd(), words.end());
|
||||
events.push_back(evt);
|
||||
}
|
||||
} catch (const kj::Exception &e) {
|
||||
rWarning("failed to parse log : %s", e.getDescription().cStr());
|
||||
if (!events.empty()) {
|
||||
rWarning("read %zu events from corrupt log", events.size());
|
||||
}
|
||||
}
|
||||
|
||||
if (!events.empty() && !(abort && *abort)) {
|
||||
std::sort(events.begin(), events.end(), Event::lessThan());
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
67
tools/replay/logreader.h
Normal file
@@ -0,0 +1,67 @@
|
||||
#pragma once
|
||||
|
||||
#if __has_include(<memory_resource>)
|
||||
#define HAS_MEMORY_RESOURCE 1
|
||||
#include <memory_resource>
|
||||
#endif
|
||||
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "cereal/gen/cpp/log.capnp.h"
|
||||
#include "system/camerad/cameras/camera_common.h"
|
||||
|
||||
const CameraType ALL_CAMERAS[] = {RoadCam, DriverCam, WideRoadCam};
|
||||
const int MAX_CAMERAS = std::size(ALL_CAMERAS);
|
||||
const int DEFAULT_EVENT_MEMORY_POOL_BLOCK_SIZE = 65000;
|
||||
|
||||
class Event {
|
||||
public:
|
||||
Event(cereal::Event::Which which, uint64_t mono_time) : reader(kj::ArrayPtr<capnp::word>{}) {
|
||||
// construct a dummy Event for binary search, e.g std::upper_bound
|
||||
this->which = which;
|
||||
this->mono_time = mono_time;
|
||||
}
|
||||
Event(const kj::ArrayPtr<const capnp::word> &amsg, bool frame = false);
|
||||
inline kj::ArrayPtr<const capnp::byte> bytes() const { return words.asBytes(); }
|
||||
|
||||
struct lessThan {
|
||||
inline bool operator()(const Event *l, const Event *r) {
|
||||
return l->mono_time < r->mono_time || (l->mono_time == r->mono_time && l->which < r->which);
|
||||
}
|
||||
};
|
||||
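// Editor's sketch (illustrative): the dummy constructor above plus lessThan allow a
// time-sorted event vector to be searched by timestamp, e.g.
//   Event key(cereal::Event::Which::INIT_DATA, target_mono_time);
//   auto it = std::upper_bound(events.begin(), events.end(), &key, Event::lessThan());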
|
||||
#if HAS_MEMORY_RESOURCE
|
||||
void *operator new(size_t size, std::pmr::monotonic_buffer_resource *mbr) {
|
||||
return mbr->allocate(size);
|
||||
}
|
||||
void operator delete(void *ptr) {
|
||||
// No-op. memory used by EventMemoryPool increases monotonically until the logReader is destroyed.
|
||||
}
|
||||
#endif
|
||||
|
||||
uint64_t mono_time;
|
||||
cereal::Event::Which which;
|
||||
cereal::Event::Reader event;
|
||||
capnp::FlatArrayMessageReader reader;
|
||||
kj::ArrayPtr<const capnp::word> words;
|
||||
bool frame;
|
||||
};
|
||||
|
||||
class LogReader {
|
||||
public:
|
||||
LogReader(size_t memory_pool_block_size = DEFAULT_EVENT_MEMORY_POOL_BLOCK_SIZE);
|
||||
~LogReader();
|
||||
bool load(const std::string &url, std::atomic<bool> *abort = nullptr,
|
||||
bool local_cache = false, int chunk_size = -1, int retries = 0);
|
||||
bool load(const std::byte *data, size_t size, std::atomic<bool> *abort = nullptr);
|
||||
std::vector<Event*> events;
|
||||
|
||||
private:
|
||||
bool parse(std::atomic<bool> *abort);
|
||||
std::string raw_;
|
||||
#ifdef HAS_MEMORY_RESOURCE
|
||||
std::unique_ptr<std::pmr::monotonic_buffer_resource> mbr_;
|
||||
#endif
|
||||
};
|
||||
83
tools/replay/main.cc
Normal file
@@ -0,0 +1,83 @@
|
||||
#include <QApplication>
|
||||
#include <QCommandLineParser>
|
||||
|
||||
#include "common/prefix.h"
|
||||
#include "tools/replay/consoleui.h"
|
||||
#include "tools/replay/replay.h"
|
||||
|
||||
int main(int argc, char *argv[]) {
|
||||
#ifdef __APPLE__
|
||||
// With all sockets opened, we might hit the default limit of 256 on macOS
|
||||
util::set_file_descriptor_limit(1024);
|
||||
#endif
|
||||
|
||||
QCoreApplication app(argc, argv);
|
||||
|
||||
const std::tuple<QString, REPLAY_FLAGS, QString> flags[] = {
|
||||
{"dcam", REPLAY_FLAG_DCAM, "load driver camera"},
|
||||
{"ecam", REPLAY_FLAG_ECAM, "load wide road camera"},
|
||||
{"no-loop", REPLAY_FLAG_NO_LOOP, "stop at the end of the route"},
|
||||
{"no-cache", REPLAY_FLAG_NO_FILE_CACHE, "turn off local cache"},
|
||||
{"qcam", REPLAY_FLAG_QCAMERA, "load qcamera"},
|
||||
{"no-hw-decoder", REPLAY_FLAG_NO_HW_DECODER, "disable HW video decoding"},
|
||||
{"no-vipc", REPLAY_FLAG_NO_VIPC, "do not output video"},
|
||||
{"all", REPLAY_FLAG_ALL_SERVICES, "do output all messages including uiDebug, userFlag"
|
||||
". this may causes issues when used along with UI"}
|
||||
};
|
||||
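// Example invocations (editor's addition; the route name is a placeholder and the binary
// path assumes the default build output location):
//   tools/replay/replay --demo                     # play the bundled demo route
//   tools/replay/replay "<dongle_id>|<timestamp>" --dcam --ecam -s 120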
|
||||
QCommandLineParser parser;
|
||||
parser.setApplicationDescription("Mock openpilot components by publishing logged messages.");
|
||||
parser.addHelpOption();
|
||||
parser.addPositionalArgument("route", "the drive to replay. find your drives at connect.comma.ai");
|
||||
parser.addOption({{"a", "allow"}, "whitelist of services to send", "allow"});
|
||||
parser.addOption({{"b", "block"}, "blacklist of services to send", "block"});
|
||||
parser.addOption({{"c", "cache"}, "cache <n> segments in memory. default is 5", "n"});
|
||||
parser.addOption({{"s", "start"}, "start from <seconds>", "seconds"});
|
||||
parser.addOption({"x", QString("playback <speed>. between %1 - %2")
|
||||
.arg(ConsoleUI::speed_array.front()).arg(ConsoleUI::speed_array.back()), "speed"});
|
||||
parser.addOption({"demo", "use a demo route instead of providing your own"});
|
||||
parser.addOption({"data_dir", "local directory with routes", "data_dir"});
|
||||
parser.addOption({"prefix", "set OPENPILOT_PREFIX", "prefix"});
|
||||
for (auto &[name, _, desc] : flags) {
|
||||
parser.addOption({name, desc});
|
||||
}
|
||||
|
||||
parser.process(app);
|
||||
const QStringList args = parser.positionalArguments();
|
||||
if (args.empty() && !parser.isSet("demo")) {
|
||||
parser.showHelp();
|
||||
}
|
||||
|
||||
const QString route = args.empty() ? DEMO_ROUTE : args.first();
|
||||
QStringList allow = parser.value("allow").isEmpty() ? QStringList{} : parser.value("allow").split(",");
|
||||
QStringList block = parser.value("block").isEmpty() ? QStringList{} : parser.value("block").split(",");
|
||||
|
||||
uint32_t replay_flags = REPLAY_FLAG_NONE;
|
||||
for (const auto &[name, flag, _] : flags) {
|
||||
if (parser.isSet(name)) {
|
||||
replay_flags |= flag;
|
||||
}
|
||||
}
|
||||
|
||||
std::unique_ptr<OpenpilotPrefix> op_prefix;
|
||||
auto prefix = parser.value("prefix");
|
||||
if (!prefix.isEmpty()) {
|
||||
op_prefix.reset(new OpenpilotPrefix(prefix.toStdString()));
|
||||
}
|
||||
|
||||
Replay *replay = new Replay(route, allow, block, nullptr, replay_flags, parser.value("data_dir"), &app);
|
||||
if (!parser.value("c").isEmpty()) {
|
||||
replay->setSegmentCacheLimit(parser.value("c").toInt());
|
||||
}
|
||||
if (!parser.value("x").isEmpty()) {
|
||||
replay->setSpeed(std::clamp(parser.value("x").toFloat(),
|
||||
ConsoleUI::speed_array.front(), ConsoleUI::speed_array.back()));
|
||||
}
|
||||
if (!replay->load()) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
ConsoleUI console_ui(replay);
|
||||
replay->start(parser.value("start").toInt());
|
||||
return app.exec();
|
||||
}
|
||||
428
tools/replay/replay.cc
Normal file
@@ -0,0 +1,428 @@
|
||||
#include "tools/replay/replay.h"
|
||||
|
||||
#include <QDebug>
|
||||
#include <QtConcurrent>
|
||||
|
||||
#include <capnp/dynamic.h>
|
||||
#include "cereal/services.h"
|
||||
#include "common/params.h"
|
||||
#include "common/timing.h"
|
||||
#include "tools/replay/util.h"
|
||||
|
||||
Replay::Replay(QString route, QStringList allow, QStringList block, SubMaster *sm_,
|
||||
uint32_t flags, QString data_dir, QObject *parent) : sm(sm_), flags_(flags), QObject(parent) {
|
||||
if (!(flags_ & REPLAY_FLAG_ALL_SERVICES)) {
|
||||
block << "uiDebug" << "userFlag";
|
||||
}
|
||||
auto event_struct = capnp::Schema::from<cereal::Event>().asStruct();
|
||||
sockets_.resize(event_struct.getUnionFields().size());
|
||||
for (const auto &[name, _] : services) {
|
||||
if (!block.contains(name.c_str()) && (allow.empty() || allow.contains(name.c_str()))) {
|
||||
uint16_t which = event_struct.getFieldByName(name).getProto().getDiscriminantValue();
|
||||
sockets_[which] = name.c_str();
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<const char *> s;
|
||||
std::copy_if(sockets_.begin(), sockets_.end(), std::back_inserter(s),
|
||||
[](const char *name) { return name != nullptr; });
|
||||
qDebug() << "services " << s;
|
||||
qDebug() << "loading route " << route;
|
||||
|
||||
if (sm == nullptr) {
|
||||
pm = std::make_unique<PubMaster>(s);
|
||||
}
|
||||
route_ = std::make_unique<Route>(route, data_dir);
|
||||
events_ = std::make_unique<std::vector<Event *>>();
|
||||
new_events_ = std::make_unique<std::vector<Event *>>();
|
||||
}
|
||||
|
||||
Replay::~Replay() {
|
||||
stop();
|
||||
}
|
||||
|
||||
void Replay::stop() {
|
||||
if (!stream_thread_ && segments_.empty()) return;
|
||||
|
||||
rInfo("shutdown: in progress...");
|
||||
if (stream_thread_ != nullptr) {
|
||||
exit_ = updating_events_ = true;
|
||||
stream_cv_.notify_one();
|
||||
stream_thread_->quit();
|
||||
stream_thread_->wait();
|
||||
stream_thread_ = nullptr;
|
||||
}
|
||||
camera_server_.reset(nullptr);
|
||||
timeline_future.waitForFinished();
|
||||
segments_.clear();
|
||||
rInfo("shutdown: done");
|
||||
}
|
||||
|
||||
bool Replay::load() {
|
||||
if (!route_->load()) {
|
||||
qCritical() << "failed to load route" << route_->name()
|
||||
<< "from" << (route_->dir().isEmpty() ? "server" : route_->dir());
|
||||
return false;
|
||||
}
|
||||
|
||||
for (auto &[n, f] : route_->segments()) {
|
||||
bool has_log = !f.rlog.isEmpty() || !f.qlog.isEmpty();
|
||||
bool has_video = !f.road_cam.isEmpty() || !f.qcamera.isEmpty();
|
||||
if (has_log && (has_video || hasFlag(REPLAY_FLAG_NO_VIPC))) {
|
||||
segments_.insert({n, nullptr});
|
||||
}
|
||||
}
|
||||
if (segments_.empty()) {
|
||||
qCritical() << "no valid segments in route" << route_->name();
|
||||
return false;
|
||||
}
|
||||
rInfo("load route %s with %zu valid segments", qPrintable(route_->name()), segments_.size());
|
||||
return true;
|
||||
}
|
||||
|
||||
void Replay::start(int seconds) {
|
||||
seekTo(route_->identifier().segment_id * 60 + seconds, false);
|
||||
}
|
||||
|
||||
void Replay::updateEvents(const std::function<bool()> &lambda) {
|
||||
// set updating_events to true to force the stream thread to release the lock and wait for events_updated.
|
||||
updating_events_ = true;
|
||||
{
|
||||
std::unique_lock lk(stream_lock_);
|
||||
events_updated_ = lambda();
|
||||
updating_events_ = false;
|
||||
}
|
||||
stream_cv_.notify_one();
|
||||
}
|
||||
|
||||
void Replay::seekTo(double seconds, bool relative) {
|
||||
seconds = relative ? seconds + currentSeconds() : seconds;
|
||||
updateEvents([&]() {
|
||||
seconds = std::max(double(0.0), seconds);
|
||||
int seg = (int)seconds / 60;
|
||||
if (segments_.find(seg) == segments_.end()) {
|
||||
rWarning("can't seek to %d s segment %d is invalid", seconds, seg);
|
||||
return true;
|
||||
}
|
||||
|
||||
rInfo("seeking to %d s, segment %d", (int)seconds, seg);
|
||||
current_segment_ = seg;
|
||||
cur_mono_time_ = route_start_ts_ + seconds * 1e9;
|
||||
emit seekedTo(seconds);
|
||||
return isSegmentMerged(seg);
|
||||
});
|
||||
queueSegment();
|
||||
}
|
||||
|
||||
void Replay::seekToFlag(FindFlag flag) {
|
||||
if (auto next = find(flag)) {
|
||||
seekTo(*next - 2, false); // seek to 2 seconds before next
|
||||
}
|
||||
}
|
||||
|
||||
void Replay::buildTimeline() {
|
||||
uint64_t engaged_begin = 0;
|
||||
bool engaged = false;
|
||||
|
||||
auto alert_status = cereal::ControlsState::AlertStatus::NORMAL;
|
||||
auto alert_size = cereal::ControlsState::AlertSize::NONE;
|
||||
uint64_t alert_begin = 0;
|
||||
std::string alert_type;
|
||||
|
||||
const TimelineType timeline_types[] = {
|
||||
[(int)cereal::ControlsState::AlertStatus::NORMAL] = TimelineType::AlertInfo,
|
||||
[(int)cereal::ControlsState::AlertStatus::USER_PROMPT] = TimelineType::AlertWarning,
|
||||
[(int)cereal::ControlsState::AlertStatus::CRITICAL] = TimelineType::AlertCritical,
|
||||
};
|
||||
|
||||
const auto &route_segments = route_->segments();
|
||||
for (auto it = route_segments.cbegin(); it != route_segments.cend() && !exit_; ++it) {
|
||||
std::shared_ptr<LogReader> log(new LogReader());
|
||||
if (!log->load(it->second.qlog.toStdString(), &exit_, !hasFlag(REPLAY_FLAG_NO_FILE_CACHE), 0, 3)) continue;
|
||||
|
||||
for (const Event *e : log->events) {
|
||||
if (e->which == cereal::Event::Which::CONTROLS_STATE) {
|
||||
auto cs = e->event.getControlsState();
|
||||
|
||||
if (engaged != cs.getEnabled()) {
|
||||
if (engaged) {
|
||||
std::lock_guard lk(timeline_lock);
|
||||
timeline.push_back({toSeconds(engaged_begin), toSeconds(e->mono_time), TimelineType::Engaged});
|
||||
}
|
||||
engaged_begin = e->mono_time;
|
||||
engaged = cs.getEnabled();
|
||||
}
|
||||
|
||||
if (alert_type != cs.getAlertType().cStr() || alert_status != cs.getAlertStatus()) {
|
||||
if (!alert_type.empty() && alert_size != cereal::ControlsState::AlertSize::NONE) {
|
||||
std::lock_guard lk(timeline_lock);
|
||||
timeline.push_back({toSeconds(alert_begin), toSeconds(e->mono_time), timeline_types[(int)alert_status]});
|
||||
}
|
||||
alert_begin = e->mono_time;
|
||||
alert_type = cs.getAlertType().cStr();
|
||||
alert_size = cs.getAlertSize();
|
||||
alert_status = cs.getAlertStatus();
|
||||
}
|
||||
} else if (e->which == cereal::Event::Which::USER_FLAG) {
|
||||
std::lock_guard lk(timeline_lock);
|
||||
timeline.push_back({toSeconds(e->mono_time), toSeconds(e->mono_time), TimelineType::UserFlag});
|
||||
}
|
||||
}
|
||||
std::sort(timeline.begin(), timeline.end(), [](auto &l, auto &r) { return std::get<2>(l) < std::get<2>(r); });
|
||||
emit qLogLoaded(it->first, log);
|
||||
}
|
||||
}

std::optional<uint64_t> Replay::find(FindFlag flag) {
  int cur_ts = currentSeconds();
  for (auto [start_ts, end_ts, type] : getTimeline()) {
    if (type == TimelineType::Engaged) {
      if (flag == FindFlag::nextEngagement && start_ts > cur_ts) {
        return start_ts;
      } else if (flag == FindFlag::nextDisEngagement && end_ts > cur_ts) {
        return end_ts;
      }
    } else if (start_ts > cur_ts) {
      if ((flag == FindFlag::nextUserFlag && type == TimelineType::UserFlag) ||
          (flag == FindFlag::nextInfo && type == TimelineType::AlertInfo) ||
          (flag == FindFlag::nextWarning && type == TimelineType::AlertWarning) ||
          (flag == FindFlag::nextCritical && type == TimelineType::AlertCritical)) {
        return start_ts;
      }
    }
  }
  return std::nullopt;
}

void Replay::pause(bool pause) {
  updateEvents([=]() {
    rWarning("%s at %.2f s", pause ? "paused..." : "resuming", currentSeconds());
    paused_ = pause;
    return true;
  });
}

void Replay::setCurrentSegment(int n) {
  if (current_segment_.exchange(n) != n) {
    QMetaObject::invokeMethod(this, &Replay::queueSegment, Qt::QueuedConnection);
  }
}

void Replay::segmentLoadFinished(bool success) {
  if (!success) {
    Segment *seg = qobject_cast<Segment *>(sender());
    rWarning("failed to load segment %d, removing it from current replay list", seg->seg_num);
    updateEvents([&]() {
      segments_.erase(seg->seg_num);
      return true;
    });
  }
  queueSegment();
}
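
// Maintains a sliding window of cached segments around the current one: up to
// segment_cache_limit / 2 segments before it and the remainder after it, so with the
// default limit of 5 the window is roughly [current - 2, current + 2]. Only one
// unloaded segment is kicked off per call; everything outside the window is freed.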

void Replay::queueSegment() {
  auto cur = segments_.lower_bound(current_segment_.load());
  if (cur == segments_.end()) return;

  auto begin = std::prev(cur, std::min<int>(segment_cache_limit / 2, std::distance(segments_.begin(), cur)));
  auto end = std::next(begin, std::min<int>(segment_cache_limit, segments_.size()));
  // load one segment at a time
  auto it = std::find_if(cur, end, [](auto &it) { return !it.second || !it.second->isLoaded(); });
  if (it != end && !it->second) {
    rDebug("loading segment %d...", it->first);
    it->second = std::make_unique<Segment>(it->first, route_->at(it->first), flags_);
    QObject::connect(it->second.get(), &Segment::loadFinished, this, &Replay::segmentLoadFinished);
  }

  mergeSegments(begin, end);

  // free segments outside of the current segment window
  std::for_each(segments_.begin(), begin, [](auto &e) { e.second.reset(nullptr); });
  std::for_each(end, segments_.end(), [](auto &e) { e.second.reset(nullptr); });

  // start stream thread
  const auto &cur_segment = cur->second;
  if (stream_thread_ == nullptr && cur_segment->isLoaded()) {
    startStream(cur_segment.get());
    emit streamStarted();
  }
}
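
// Rebuilds events_ from all loaded segments in the window. Events are filtered down to the
// allowed sockets, kept globally sorted by mono_time with std::inplace_merge, and the new
// vector is swapped in under updateEvents() so the stream thread never reads a half-built
// list.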

void Replay::mergeSegments(const SegmentMap::iterator &begin, const SegmentMap::iterator &end) {
  std::vector<int> segments_need_merge;
  size_t new_events_size = 0;
  for (auto it = begin; it != end; ++it) {
    if (it->second && it->second->isLoaded()) {
      segments_need_merge.push_back(it->first);
      new_events_size += it->second->log->events.size();
    }
  }

  if (segments_need_merge != segments_merged_) {
    std::string s;
    for (int i = 0; i < segments_need_merge.size(); ++i) {
      s += std::to_string(segments_need_merge[i]);
      if (i != segments_need_merge.size() - 1) s += ", ";
    }
    rDebug("merge segments %s", s.c_str());
    new_events_->clear();
    new_events_->reserve(new_events_size);
    for (int n : segments_need_merge) {
      size_t size = new_events_->size();
      const auto &events = segments_[n]->log->events;
      std::copy_if(events.begin(), events.end(), std::back_inserter(*new_events_),
                   [this](auto e) { return e->which < sockets_.size() && sockets_[e->which] != nullptr; });
      std::inplace_merge(new_events_->begin(), new_events_->begin() + size, new_events_->end(), Event::lessThan());
    }

    if (stream_thread_) {
      emit segmentsMerged();
    }
    updateEvents([&]() {
      events_.swap(new_events_);
      segments_merged_ = segments_need_merge;
      // Do not wake up the stream thread if the current segment has not been merged.
      return isSegmentMerged(current_segment_) || (segments_.count(current_segment_) == 0);
    });
  }
}

void Replay::startStream(const Segment *cur_segment) {
  const auto &events = cur_segment->log->events;

  // each segment has an INIT_DATA
  route_start_ts_ = events.front()->mono_time;
  cur_mono_time_ += route_start_ts_ - 1;

  // write CarParams
  auto it = std::find_if(events.begin(), events.end(), [](auto e) { return e->which == cereal::Event::Which::CAR_PARAMS; });
  if (it != events.end()) {
    car_fingerprint_ = (*it)->event.getCarParams().getCarFingerprint();
    capnp::MallocMessageBuilder builder;
    builder.setRoot((*it)->event.getCarParams());
    auto words = capnp::messageToFlatArray(builder);
    auto bytes = words.asBytes();
    Params().put("CarParams", (const char *)bytes.begin(), bytes.size());
    Params().put("CarParamsPersistent", (const char *)bytes.begin(), bytes.size());
  } else {
    rWarning("failed to read CarParams from current segment");
  }

  // start camera server
  if (!hasFlag(REPLAY_FLAG_NO_VIPC)) {
    std::pair<int, int> camera_size[MAX_CAMERAS] = {};
    for (auto type : ALL_CAMERAS) {
      if (auto &fr = cur_segment->frames[type]) {
        camera_size[type] = {fr->width, fr->height};
      }
    }
    camera_server_ = std::make_unique<CameraServer>(camera_size);
  }

  emit segmentsMerged();
  // start stream thread
  stream_thread_ = new QThread();
  QObject::connect(stream_thread_, &QThread::started, [=]() { stream(); });
  QObject::connect(stream_thread_, &QThread::finished, stream_thread_, &QThread::deleteLater);
  stream_thread_->start();

  timeline_future = QtConcurrent::run(this, &Replay::buildTimeline);
}
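
// Replayed messages go out one of two ways: with no SubMaster attached they are republished
// over the PubMaster sockets, and publishing for a service is disabled if the send fails
// with a multiple-publishers error; with a SubMaster attached the events are injected into
// it directly via update_msgs().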

void Replay::publishMessage(const Event *e) {
  if (event_filter && event_filter(e, filter_opaque)) return;

  if (sm == nullptr) {
    auto bytes = e->bytes();
    int ret = pm->send(sockets_[e->which], (capnp::byte *)bytes.begin(), bytes.size());
    if (ret == -1) {
      rWarning("stop publishing %s due to multiple publishers error", sockets_[e->which]);
      sockets_[e->which] = nullptr;
    }
  } else {
    sm->update_msgs(nanos_since_boot(), {{sockets_[e->which], e->event}});
  }
}

void Replay::publishFrame(const Event *e) {
  static const std::map<cereal::Event::Which, CameraType> cam_types{
    {cereal::Event::ROAD_ENCODE_IDX, RoadCam},
    {cereal::Event::DRIVER_ENCODE_IDX, DriverCam},
    {cereal::Event::WIDE_ROAD_ENCODE_IDX, WideRoadCam},
  };
  if ((e->which == cereal::Event::DRIVER_ENCODE_IDX && !hasFlag(REPLAY_FLAG_DCAM)) ||
      (e->which == cereal::Event::WIDE_ROAD_ENCODE_IDX && !hasFlag(REPLAY_FLAG_ECAM))) {
    return;
  }
  auto eidx = capnp::AnyStruct::Reader(e->event).getPointerSection()[0].getAs<cereal::EncodeIndex>();
  if (eidx.getType() == cereal::EncodeIndex::Type::FULL_H_E_V_C && isSegmentMerged(eidx.getSegmentNum())) {
    CameraType cam = cam_types.at(e->which);
    camera_server_->pushFrame(cam, segments_[eidx.getSegmentNum()]->frames[cam].get(), eidx);
  }
}
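
// The stream loop paces playback by comparing elapsed log time with elapsed wall time:
// the target delay for an event is (event mono_time - start mono_time) / speed_, so at
// 2x speed 100 ms of log time maps to 50 ms of wall time. If the loop falls more than a
// second behind (e.g. after skipping an invalid segment) or the speed changes, the
// reference timestamps are reset instead of sleeping to catch up.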

void Replay::stream() {
  cereal::Event::Which cur_which = cereal::Event::Which::INIT_DATA;
  double prev_replay_speed = speed_;
  std::unique_lock lk(stream_lock_);

  while (true) {
    stream_cv_.wait(lk, [=]() { return exit_ || (events_updated_ && !paused_); });
    events_updated_ = false;
    if (exit_) break;

    Event cur_event(cur_which, cur_mono_time_);
    auto eit = std::upper_bound(events_->begin(), events_->end(), &cur_event, Event::lessThan());
    if (eit == events_->end()) {
      rInfo("waiting for events...");
      continue;
    }

    uint64_t evt_start_ts = cur_mono_time_;
    uint64_t loop_start_ts = nanos_since_boot();

    for (auto end = events_->end(); !updating_events_ && eit != end; ++eit) {
      const Event *evt = (*eit);
      cur_which = evt->which;
      cur_mono_time_ = evt->mono_time;
      setCurrentSegment(toSeconds(cur_mono_time_) / 60);

      if (sockets_[cur_which] != nullptr) {
        // keep time
        long etime = (cur_mono_time_ - evt_start_ts) / speed_;
        long rtime = nanos_since_boot() - loop_start_ts;
        long behind_ns = etime - rtime;
        // if behind_ns is greater than 1 second, an invalid segment was skipped by seeking/replaying
        if (behind_ns >= 1 * 1e9 || speed_ != prev_replay_speed) {
          // reset event start times
          evt_start_ts = cur_mono_time_;
          loop_start_ts = nanos_since_boot();
          prev_replay_speed = speed_;
        } else if (behind_ns > 0) {
          precise_nano_sleep(behind_ns);
        }

        if (!evt->frame) {
          publishMessage(evt);
        } else if (camera_server_) {
          if (speed_ > 1.0) {
            camera_server_->waitForSent();
          }
          publishFrame(evt);
        }
      }
    }
    // wait for the frame to be sent before unlocking (frameReader may be deleted after unlock)
    if (camera_server_) {
      camera_server_->waitForSent();
    }

    if (eit == events_->end() && !hasFlag(REPLAY_FLAG_NO_LOOP)) {
      int last_segment = segments_.empty() ? 0 : segments_.rbegin()->first;
      if (current_segment_ >= last_segment && isSegmentMerged(last_segment)) {
        rInfo("reaches the end of route, restart from beginning");
        QMetaObject::invokeMethod(this, std::bind(&Replay::seekTo, this, 0, false), Qt::QueuedConnection);
      }
    }
  }
}
146
tools/replay/replay.h
Normal file
@@ -0,0 +1,146 @@
#pragma once

#include <algorithm>
#include <map>
#include <memory>
#include <optional>
#include <string>
#include <tuple>
#include <vector>
#include <utility>

#include <QThread>

#include "tools/replay/camera.h"
#include "tools/replay/route.h"

const QString DEMO_ROUTE = "a2a0ccea32023010|2023-07-27--13-01-19";

// one segment uses about 100M of memory
constexpr int MIN_SEGMENTS_CACHE = 5;

enum REPLAY_FLAGS {
  REPLAY_FLAG_NONE = 0x0000,
  REPLAY_FLAG_DCAM = 0x0002,
  REPLAY_FLAG_ECAM = 0x0004,
  REPLAY_FLAG_NO_LOOP = 0x0010,
  REPLAY_FLAG_NO_FILE_CACHE = 0x0020,
  REPLAY_FLAG_QCAMERA = 0x0040,
  REPLAY_FLAG_NO_HW_DECODER = 0x0100,
  REPLAY_FLAG_NO_VIPC = 0x0400,
  REPLAY_FLAG_ALL_SERVICES = 0x0800,
};

enum class FindFlag {
  nextEngagement,
  nextDisEngagement,
  nextUserFlag,
  nextInfo,
  nextWarning,
  nextCritical
};

enum class TimelineType { None, Engaged, AlertInfo, AlertWarning, AlertCritical, UserFlag };
typedef bool (*replayEventFilter)(const Event *, void *);
Q_DECLARE_METATYPE(std::shared_ptr<LogReader>);

class Replay : public QObject {
  Q_OBJECT

public:
  Replay(QString route, QStringList allow, QStringList block, SubMaster *sm = nullptr,
         uint32_t flags = REPLAY_FLAG_NONE, QString data_dir = "", QObject *parent = 0);
  ~Replay();
  bool load();
  void start(int seconds = 0);
  void stop();
  void pause(bool pause);
  void seekToFlag(FindFlag flag);
  void seekTo(double seconds, bool relative);
  inline bool isPaused() const { return paused_; }
  // The filter is called in the streaming thread; return quickly from it to avoid blocking streaming.
  // The filter function must return true if the event should be filtered out, false otherwise.
  inline void installEventFilter(replayEventFilter filter, void *opaque) {
    filter_opaque = opaque;
    event_filter = filter;
  }
  inline int segmentCacheLimit() const { return segment_cache_limit; }
  inline void setSegmentCacheLimit(int n) { segment_cache_limit = std::max(MIN_SEGMENTS_CACHE, n); }
  inline bool hasFlag(REPLAY_FLAGS flag) const { return flags_ & flag; }
  inline void addFlag(REPLAY_FLAGS flag) { flags_ |= flag; }
  inline void removeFlag(REPLAY_FLAGS flag) { flags_ &= ~flag; }
  inline const Route* route() const { return route_.get(); }
  inline double currentSeconds() const { return double(cur_mono_time_ - route_start_ts_) / 1e9; }
  inline QDateTime currentDateTime() const { return route_->datetime().addSecs(currentSeconds()); }
  inline uint64_t routeStartTime() const { return route_start_ts_; }
  inline double toSeconds(uint64_t mono_time) const { return (mono_time - route_start_ts_) / 1e9; }
  inline int totalSeconds() const { return (!segments_.empty()) ? (segments_.rbegin()->first + 1) * 60 : 0; }
  inline void setSpeed(float speed) { speed_ = speed; }
  inline float getSpeed() const { return speed_; }
  inline const std::vector<Event *> *events() const { return events_.get(); }
  inline const std::map<int, std::unique_ptr<Segment>> &segments() const { return segments_; }
  inline const std::string &carFingerprint() const { return car_fingerprint_; }
  inline const std::vector<std::tuple<double, double, TimelineType>> getTimeline() {
    std::lock_guard lk(timeline_lock);
    return timeline;
  }

signals:
  void streamStarted();
  void segmentsMerged();
  void seekedTo(double sec);
  void qLogLoaded(int segnum, std::shared_ptr<LogReader> qlog);

protected slots:
  void segmentLoadFinished(bool success);

protected:
  typedef std::map<int, std::unique_ptr<Segment>> SegmentMap;
  std::optional<uint64_t> find(FindFlag flag);
  void startStream(const Segment *cur_segment);
  void stream();
  void setCurrentSegment(int n);
  void queueSegment();
  void mergeSegments(const SegmentMap::iterator &begin, const SegmentMap::iterator &end);
  void updateEvents(const std::function<bool()>& lambda);
  void publishMessage(const Event *e);
  void publishFrame(const Event *e);
  void buildTimeline();
  inline bool isSegmentMerged(int n) {
    return std::find(segments_merged_.begin(), segments_merged_.end(), n) != segments_merged_.end();
  }

  QThread *stream_thread_ = nullptr;
  std::mutex stream_lock_;
  std::condition_variable stream_cv_;
  std::atomic<bool> updating_events_ = false;
  std::atomic<int> current_segment_ = 0;
  SegmentMap segments_;
  // the following variables must be protected with stream_lock_
  std::atomic<bool> exit_ = false;
  bool paused_ = false;
  bool events_updated_ = false;
  uint64_t route_start_ts_ = 0;
  std::atomic<uint64_t> cur_mono_time_ = 0;
  std::unique_ptr<std::vector<Event *>> events_;
  std::unique_ptr<std::vector<Event *>> new_events_;
  std::vector<int> segments_merged_;

  // messaging
  SubMaster *sm = nullptr;
  std::unique_ptr<PubMaster> pm;
  std::vector<const char*> sockets_;
  std::unique_ptr<Route> route_;
  std::unique_ptr<CameraServer> camera_server_;
  std::atomic<uint32_t> flags_ = REPLAY_FLAG_NONE;

  std::mutex timeline_lock;
  QFuture<void> timeline_future;
  std::vector<std::tuple<double, double, TimelineType>> timeline;
  std::string car_fingerprint_;
  std::atomic<float> speed_ = 1.0;
  replayEventFilter event_filter = nullptr;
  void *filter_opaque = nullptr;
  int segment_cache_limit = MIN_SEGMENTS_CACHE;
};
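
// A minimal usage sketch based on the public API above (the allow/block lists, flags, and
// the exact behavior of start() are only loosely described here):
//
//   Replay replay("a2a0ccea32023010|2023-07-27--13-01-19", {}, {});
//   if (replay.load()) {
//     replay.start();                                // begin playback from 0 s
//     replay.setSpeed(2.0);
//     replay.seekToFlag(FindFlag::nextEngagement);
//   }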
144
tools/replay/route.cc
Normal file
@@ -0,0 +1,144 @@
#include "tools/replay/route.h"
|
||||
|
||||
#include <QDir>
|
||||
#include <QEventLoop>
|
||||
#include <QJsonArray>
|
||||
#include <QJsonDocument>
|
||||
#include <QRegExp>
|
||||
#include <QtConcurrent>
|
||||
#include <array>
|
||||
|
||||
#include "selfdrive/ui/qt/api.h"
|
||||
#include "system/hardware/hw.h"
|
||||
#include "tools/replay/replay.h"
|
||||
#include "tools/replay/util.h"
|
||||
|
||||
Route::Route(const QString &route, const QString &data_dir) : data_dir_(data_dir) {
|
||||
route_ = parseRoute(route);
|
||||
}
|
||||
|
||||
RouteIdentifier Route::parseRoute(const QString &str) {
|
||||
QRegExp rx(R"(^(?:([a-z0-9]{16})([|_/]))?(\d{4}-\d{2}-\d{2}--\d{2}-\d{2}-\d{2})(?:(--|/)(\d*))?$)");
|
||||
if (rx.indexIn(str) == -1) return {};
|
||||
|
||||
const QStringList list = rx.capturedTexts();
|
||||
return {.dongle_id = list[1], .timestamp = list[3], .segment_id = list[5].toInt(), .str = list[1] + "|" + list[3]};
|
||||
}
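
// Per the regex above, a route string is an optional 16-char dongle id plus separator
// ("|", "_" or "/"), a timestamp like "2023-07-27--13-01-19", and an optional segment
// suffix ("--N" or "/N"), e.g. "a2a0ccea32023010|2023-07-27--13-01-19" or
// "a2a0ccea32023010|2023-07-27--13-01-19--3".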

bool Route::load() {
  if (route_.str.isEmpty() || (data_dir_.isEmpty() && route_.dongle_id.isEmpty())) {
    rInfo("invalid route format");
    return false;
  }
  date_time_ = QDateTime::fromString(route_.timestamp, "yyyy-MM-dd--HH-mm-ss");
  return data_dir_.isEmpty() ? loadFromServer() : loadFromLocal();
}

bool Route::loadFromServer() {
  QEventLoop loop;
  HttpRequest http(nullptr, !Hardware::PC());
  QObject::connect(&http, &HttpRequest::requestDone, [&](const QString &json, bool success, QNetworkReply::NetworkError error) {
    if (error == QNetworkReply::ContentAccessDenied || error == QNetworkReply::AuthenticationRequiredError) {
      qWarning() << ">> Unauthorized. Authenticate with tools/lib/auth.py <<";
    }

    loop.exit(success ? loadFromJson(json) : 0);
  });
  http.sendRequest(CommaApi::BASE_URL + "/v1/route/" + route_.str + "/files");
  return loop.exec();
}

bool Route::loadFromJson(const QString &json) {
  QRegExp rx(R"(\/(\d+)\/)");
  for (const auto &value : QJsonDocument::fromJson(json.trimmed().toUtf8()).object()) {
    for (const auto &url : value.toArray()) {
      QString url_str = url.toString();
      if (rx.indexIn(url_str) != -1) {
        addFileToSegment(rx.cap(1).toInt(), url_str);
      }
    }
  }
  return !segments_.empty();
}

bool Route::loadFromLocal() {
  QDir log_dir(data_dir_);
  for (const auto &folder : log_dir.entryList(QDir::Dirs | QDir::NoDot | QDir::NoDotDot, QDir::NoSort)) {
    int pos = folder.lastIndexOf("--");
    if (pos != -1 && folder.left(pos) == route_.timestamp) {
      const int seg_num = folder.mid(pos + 2).toInt();
      QDir segment_dir(log_dir.filePath(folder));
      for (const auto &f : segment_dir.entryList(QDir::Files)) {
        addFileToSegment(seg_num, segment_dir.absoluteFilePath(f));
      }
    }
  }
  return !segments_.empty();
}

void Route::addFileToSegment(int n, const QString &file) {
  QString name = QUrl(file).fileName();

  const int pos = name.lastIndexOf("--");
  name = pos != -1 ? name.mid(pos + 2) : name;

  if (name == "rlog.bz2" || name == "rlog") {
    segments_[n].rlog = file;
  } else if (name == "qlog.bz2" || name == "qlog") {
    segments_[n].qlog = file;
  } else if (name == "fcamera.hevc") {
    segments_[n].road_cam = file;
  } else if (name == "dcamera.hevc") {
    segments_[n].driver_cam = file;
  } else if (name == "ecamera.hevc") {
    segments_[n].wide_road_cam = file;
  } else if (name == "qcamera.ts") {
    segments_[n].qcamera = file;
  }
}

// class Segment

Segment::Segment(int n, const SegmentFile &files, uint32_t flags) : seg_num(n), flags(flags) {
  // [RoadCam, DriverCam, WideRoadCam, log]. fallback to qcamera/qlog
  const std::array file_list = {
    (flags & REPLAY_FLAG_QCAMERA) || files.road_cam.isEmpty() ? files.qcamera : files.road_cam,
    flags & REPLAY_FLAG_DCAM ? files.driver_cam : "",
    flags & REPLAY_FLAG_ECAM ? files.wide_road_cam : "",
    files.rlog.isEmpty() ? files.qlog : files.rlog,
  };
  for (int i = 0; i < file_list.size(); ++i) {
    if (!file_list[i].isEmpty() && (!(flags & REPLAY_FLAG_NO_VIPC) || i >= MAX_CAMERAS)) {
      ++loading_;
      synchronizer_.addFuture(QtConcurrent::run(this, &Segment::loadFile, i, file_list[i].toStdString()));
    }
  }
}

Segment::~Segment() {
  disconnect();
  abort_ = true;
  synchronizer_.setCancelOnWait(true);
  synchronizer_.waitForFinished();
}

void Segment::loadFile(int id, const std::string file) {
  const bool local_cache = !(flags & REPLAY_FLAG_NO_FILE_CACHE);
  bool success = false;
  if (id < MAX_CAMERAS) {
    frames[id] = std::make_unique<FrameReader>();
    success = frames[id]->load(file, flags & REPLAY_FLAG_NO_HW_DECODER, &abort_, local_cache, 20 * 1024 * 1024, 3);
  } else {
    log = std::make_unique<LogReader>();
    success = log->load(file, &abort_, local_cache, 0, 3);
  }

  if (!success) {
    // abort all loading jobs.
    abort_ = true;
  }

  if (--loading_ == 0) {
    emit loadFinished(!abort_);
  }
}
75
tools/replay/route.h
Normal file
@@ -0,0 +1,75 @@
#pragma once

#include <map>
#include <memory>
#include <string>

#include <QDateTime>
#include <QFutureSynchronizer>

#include "tools/replay/framereader.h"
#include "tools/replay/logreader.h"
#include "tools/replay/util.h"

struct RouteIdentifier {
  QString dongle_id;
  QString timestamp;
  int segment_id;
  QString str;
};

struct SegmentFile {
  QString rlog;
  QString qlog;
  QString road_cam;
  QString driver_cam;
  QString wide_road_cam;
  QString qcamera;
};

class Route {
public:
  Route(const QString &route, const QString &data_dir = {});
  bool load();
  inline const QString &name() const { return route_.str; }
  inline const QDateTime datetime() const { return date_time_; }
  inline const QString &dir() const { return data_dir_; }
  inline const RouteIdentifier &identifier() const { return route_; }
  inline const std::map<int, SegmentFile> &segments() const { return segments_; }
  inline const SegmentFile &at(int n) { return segments_.at(n); }
  static RouteIdentifier parseRoute(const QString &str);

protected:
  bool loadFromLocal();
  bool loadFromServer();
  bool loadFromJson(const QString &json);
  void addFileToSegment(int seg_num, const QString &file);
  RouteIdentifier route_ = {};
  QString data_dir_;
  std::map<int, SegmentFile> segments_;
  QDateTime date_time_;
};

class Segment : public QObject {
  Q_OBJECT

public:
  Segment(int n, const SegmentFile &files, uint32_t flags);
  ~Segment();
  inline bool isLoaded() const { return !loading_ && !abort_; }

  const int seg_num = 0;
  std::unique_ptr<LogReader> log;
  std::unique_ptr<FrameReader> frames[MAX_CAMERAS] = {};

signals:
  void loadFinished(bool success);

protected:
  void loadFile(int id, const std::string file);

  std::atomic<bool> abort_ = false;
  std::atomic<int> loading_ = 0;
  QFutureSynchronizer<void> synchronizer_;
  uint32_t flags;
};
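
// Loading flow: the Segment constructor queues one QtConcurrent::run() job per file
// (cameras first, the log last), counts them in loading_, and emits loadFinished(success)
// from whichever job finishes last; any single failure sets abort_ so the remaining jobs
// bail out and the segment reports failure.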
331
tools/replay/util.cc
Normal file
@@ -0,0 +1,331 @@
#include "tools/replay/util.h"
|
||||
|
||||
#include <bzlib.h>
|
||||
#include <curl/curl.h>
|
||||
#include <openssl/sha.h>
|
||||
|
||||
#include <cstdarg>
|
||||
#include <cstring>
|
||||
#include <cassert>
|
||||
#include <cmath>
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <map>
|
||||
#include <mutex>
|
||||
#include <numeric>
|
||||
#include <utility>
|
||||
|
||||
#include "common/timing.h"
|
||||
#include "common/util.h"
|
||||
|
||||
ReplayMessageHandler message_handler = nullptr;
|
||||
void installMessageHandler(ReplayMessageHandler handler) { message_handler = handler; }
|
||||
|
||||
void logMessage(ReplyMsgType type, const char *fmt, ...) {
|
||||
static std::mutex lock;
|
||||
std::lock_guard lk(lock);
|
||||
|
||||
char *msg_buf = nullptr;
|
||||
va_list args;
|
||||
va_start(args, fmt);
|
||||
int ret = vasprintf(&msg_buf, fmt, args);
|
||||
va_end(args);
|
||||
if (ret <= 0 || !msg_buf) return;
|
||||
|
||||
if (message_handler) {
|
||||
message_handler(type, msg_buf);
|
||||
} else {
|
||||
if (type == ReplyMsgType::Debug) {
|
||||
std::cout << "\033[38;5;248m" << msg_buf << "\033[00m" << std::endl;
|
||||
} else if (type == ReplyMsgType::Warning) {
|
||||
std::cout << "\033[38;5;227m" << msg_buf << "\033[00m" << std::endl;
|
||||
} else if (type == ReplyMsgType::Critical) {
|
||||
std::cout << "\033[38;5;196m" << msg_buf << "\033[00m" << std::endl;
|
||||
} else {
|
||||
std::cout << msg_buf << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
free(msg_buf);
|
||||
}
|
||||
|
||||
namespace {
|
||||
|
||||
struct CURLGlobalInitializer {
|
||||
CURLGlobalInitializer() { curl_global_init(CURL_GLOBAL_DEFAULT); }
|
||||
~CURLGlobalInitializer() { curl_global_cleanup(); }
|
||||
};
|
||||
|
||||
static CURLGlobalInitializer curl_initializer;
|
||||
|
||||
template <class T>
|
||||
struct MultiPartWriter {
|
||||
T *buf;
|
||||
size_t *total_written;
|
||||
size_t offset;
|
||||
size_t end;
|
||||
|
||||
size_t write(char *data, size_t size, size_t count) {
|
||||
size_t bytes = size * count;
|
||||
if ((offset + bytes) > end) return 0;
|
||||
|
||||
if constexpr (std::is_same<T, std::string>::value) {
|
||||
memcpy(buf->data() + offset, data, bytes);
|
||||
} else if constexpr (std::is_same<T, std::ofstream>::value) {
|
||||
buf->seekp(offset);
|
||||
buf->write(data, bytes);
|
||||
}
|
||||
|
||||
offset += bytes;
|
||||
*total_written += bytes;
|
||||
return bytes;
|
||||
}
|
||||
};
|
||||
|
||||
template <class T>
|
||||
size_t write_cb(char *data, size_t size, size_t count, void *userp) {
|
||||
auto w = (MultiPartWriter<T> *)userp;
|
||||
return w->write(data, size, count);
|
||||
}
|
||||
|
||||
size_t dumy_write_cb(char *data, size_t size, size_t count, void *userp) { return size * count; }
|
||||
|
||||
struct DownloadStats {
|
||||
void installDownloadProgressHandler(DownloadProgressHandler handler) {
|
||||
std::lock_guard lk(lock);
|
||||
download_progress_handler = handler;
|
||||
}
|
||||
|
||||
void add(const std::string &url, uint64_t total_bytes) {
|
||||
std::lock_guard lk(lock);
|
||||
items[url] = {0, total_bytes};
|
||||
}
|
||||
|
||||
void remove(const std::string &url) {
|
||||
std::lock_guard lk(lock);
|
||||
items.erase(url);
|
||||
}
|
||||
|
||||
void update(const std::string &url, uint64_t downloaded, bool success = true) {
|
||||
std::lock_guard lk(lock);
|
||||
items[url].first = downloaded;
|
||||
|
||||
auto stat = std::accumulate(items.begin(), items.end(), std::pair<int, int>{}, [=](auto &a, auto &b){
|
||||
return std::pair{a.first + b.second.first, a.second + b.second.second};
|
||||
});
|
||||
double tm = millis_since_boot();
|
||||
if (download_progress_handler && ((tm - prev_tm) > 500 || !success || stat.first >= stat.second)) {
|
||||
download_progress_handler(stat.first, stat.second, success);
|
||||
prev_tm = tm;
|
||||
}
|
||||
}
|
||||
|
||||
std::mutex lock;
|
||||
std::map<std::string, std::pair<uint64_t, uint64_t>> items;
|
||||
double prev_tm = 0;
|
||||
DownloadProgressHandler download_progress_handler = nullptr;
|
||||
};
|
||||
|
||||
static DownloadStats download_stats;
|
||||
|
||||
} // namespace
|
||||
|
||||

void installDownloadProgressHandler(DownloadProgressHandler handler) {
  download_stats.installDownloadProgressHandler(handler);
}

std::string formattedDataSize(size_t size) {
  if (size < 1024) {
    return std::to_string(size) + " B";
  } else if (size < 1024 * 1024) {
    return util::string_format("%.2f KB", (float)size / 1024);
  } else {
    return util::string_format("%.2f MB", (float)size / (1024 * 1024));
  }
}

size_t getRemoteFileSize(const std::string &url, std::atomic<bool> *abort) {
  CURL *curl = curl_easy_init();
  if (!curl) return -1;

  curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
  curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, dummy_write_cb);
  curl_easy_setopt(curl, CURLOPT_HEADER, 1);
  curl_easy_setopt(curl, CURLOPT_NOBODY, 1);

  CURLM *cm = curl_multi_init();
  curl_multi_add_handle(cm, curl);
  int still_running = 1;
  while (still_running > 0 && !(abort && *abort)) {
    CURLMcode mc = curl_multi_perform(cm, &still_running);
    if (!mc) curl_multi_wait(cm, nullptr, 0, 1000, nullptr);
  }

  double content_length = -1;
  curl_easy_getinfo(curl, CURLINFO_CONTENT_LENGTH_DOWNLOAD, &content_length);
  curl_multi_remove_handle(cm, curl);
  curl_easy_cleanup(curl);
  curl_multi_cleanup(cm);
  return content_length > 0 ? (size_t)content_length : 0;
}

std::string getUrlWithoutQuery(const std::string &url) {
  size_t idx = url.find("?");
  return (idx == std::string::npos ? url : url.substr(0, idx));
}

template <class T>
bool httpDownload(const std::string &url, T &buf, size_t chunk_size, size_t content_length, std::atomic<bool> *abort) {
  download_stats.add(url, content_length);

  int parts = 1;
  if (chunk_size > 0 && content_length > 10 * 1024 * 1024) {
    parts = std::nearbyint(content_length / (float)chunk_size);
    parts = std::clamp(parts, 1, 5);
  }

  CURLM *cm = curl_multi_init();
  size_t written = 0;
  std::map<CURL *, MultiPartWriter<T>> writers;
  const int part_size = content_length / parts;
  for (int i = 0; i < parts; ++i) {
    CURL *eh = curl_easy_init();
    writers[eh] = {
      .buf = &buf,
      .total_written = &written,
      .offset = (size_t)(i * part_size),
      .end = i == parts - 1 ? content_length : (i + 1) * part_size,
    };
    curl_easy_setopt(eh, CURLOPT_WRITEFUNCTION, write_cb<T>);
    curl_easy_setopt(eh, CURLOPT_WRITEDATA, (void *)(&writers[eh]));
    curl_easy_setopt(eh, CURLOPT_URL, url.c_str());
    curl_easy_setopt(eh, CURLOPT_RANGE, util::string_format("%zu-%zu", writers[eh].offset, writers[eh].end - 1).c_str());
    curl_easy_setopt(eh, CURLOPT_HTTPGET, 1);
    curl_easy_setopt(eh, CURLOPT_NOSIGNAL, 1);
    curl_easy_setopt(eh, CURLOPT_FOLLOWLOCATION, 1);

    curl_multi_add_handle(cm, eh);
  }

  int still_running = 1;
  while (still_running > 0 && !(abort && *abort)) {
    curl_multi_wait(cm, nullptr, 0, 1000, nullptr);
    curl_multi_perform(cm, &still_running);
    download_stats.update(url, written);
  }

  CURLMsg *msg;
  int msgs_left = -1;
  int complete = 0;
  while ((msg = curl_multi_info_read(cm, &msgs_left)) && !(abort && *abort)) {
    if (msg->msg == CURLMSG_DONE) {
      if (msg->data.result == CURLE_OK) {
        long res_status = 0;
        curl_easy_getinfo(msg->easy_handle, CURLINFO_RESPONSE_CODE, &res_status);
        if (res_status == 206) {
          complete++;
        } else {
          rWarning("Download failed: http error code: %ld", res_status);
        }
      } else {
        rWarning("Download failed: connection failure: %d", msg->data.result);
      }
    }
  }

  bool success = complete == parts;
  download_stats.update(url, written, success);
  download_stats.remove(url);

  for (const auto &[e, w] : writers) {
    curl_multi_remove_handle(cm, e);
    curl_easy_cleanup(e);
  }
  curl_multi_cleanup(cm);

  return success;
}
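
// Download strategy: files larger than 10 MB are split into up to 5 byte-range requests of
// roughly chunk_size each, fetched concurrently through one curl multi handle; every part
// must complete with HTTP 206 (Partial Content) for the download to count as successful,
// and progress is reported through the shared DownloadStats instance.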

std::string httpGet(const std::string &url, size_t chunk_size, std::atomic<bool> *abort) {
  size_t size = getRemoteFileSize(url, abort);
  if (size == 0) return {};

  std::string result(size, '\0');
  return httpDownload(url, result, chunk_size, size, abort) ? result : "";
}

bool httpDownload(const std::string &url, const std::string &file, size_t chunk_size, std::atomic<bool> *abort) {
  size_t size = getRemoteFileSize(url, abort);
  if (size == 0) return false;

  std::ofstream of(file, std::ios::binary | std::ios::out);
  of.seekp(size - 1).write("\0", 1);
  return httpDownload(url, of, chunk_size, size, abort);
}

std::string decompressBZ2(const std::string &in, std::atomic<bool> *abort) {
  return decompressBZ2((std::byte *)in.data(), in.size(), abort);
}

std::string decompressBZ2(const std::byte *in, size_t in_size, std::atomic<bool> *abort) {
  if (in_size == 0) return {};

  bz_stream strm = {};
  int bzerror = BZ2_bzDecompressInit(&strm, 0, 0);
  assert(bzerror == BZ_OK);

  strm.next_in = (char *)in;
  strm.avail_in = in_size;
  std::string out(in_size * 5, '\0');
  do {
    strm.next_out = (char *)(&out[strm.total_out_lo32]);
    strm.avail_out = out.size() - strm.total_out_lo32;

    const char *prev_write_pos = strm.next_out;
    bzerror = BZ2_bzDecompress(&strm);
    if (bzerror == BZ_OK && prev_write_pos == strm.next_out) {
      // content is corrupt
      bzerror = BZ_STREAM_END;
      rWarning("decompressBZ2 error: content is corrupt");
      break;
    }

    if (bzerror == BZ_OK && strm.avail_in > 0 && strm.avail_out == 0) {
      out.resize(out.size() * 2);
    }
  } while (bzerror == BZ_OK && !(abort && *abort));

  BZ2_bzDecompressEnd(&strm);
  if (bzerror == BZ_STREAM_END && !(abort && *abort)) {
    out.resize(strm.total_out_lo32);
    return out;
  }
  return {};
}
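
// The output buffer starts at 5x the compressed size and doubles whenever BZ2_bzDecompress
// runs out of output space with input still pending; a pass that consumes no output while
// reporting BZ_OK is treated as corrupt input and aborts the decompression.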

void precise_nano_sleep(long sleep_ns) {
  const long estimate_ns = 1 * 1e6;  // 1ms
  struct timespec req = {.tv_nsec = estimate_ns};
  uint64_t start_sleep = nanos_since_boot();
  while (sleep_ns > estimate_ns) {
    nanosleep(&req, nullptr);
    uint64_t end_sleep = nanos_since_boot();
    sleep_ns -= (end_sleep - start_sleep);
    start_sleep = end_sleep;
  }
  // spin wait
  if (sleep_ns > 0) {
    while ((nanos_since_boot() - start_sleep) <= sleep_ns) {
      std::this_thread::yield();
    }
  }
}

std::string sha256(const std::string &str) {
  unsigned char hash[SHA256_DIGEST_LENGTH];
  SHA256_CTX sha256;
  SHA256_Init(&sha256);
  SHA256_Update(&sha256, str.c_str(), str.size());
  SHA256_Final(hash, &sha256);
  return util::hexdump(hash, SHA256_DIGEST_LENGTH);
}
34
tools/replay/util.h
Normal file
@@ -0,0 +1,34 @@
#pragma once

#include <atomic>
#include <functional>
#include <string>

enum class ReplyMsgType {
  Info,
  Debug,
  Warning,
  Critical
};

typedef std::function<void(ReplyMsgType type, const std::string msg)> ReplayMessageHandler;
void installMessageHandler(ReplayMessageHandler);
void logMessage(ReplyMsgType type, const char* fmt, ...);

#define rInfo(fmt, ...) ::logMessage(ReplyMsgType::Info, fmt, ## __VA_ARGS__)
#define rDebug(fmt, ...) ::logMessage(ReplyMsgType::Debug, fmt, ## __VA_ARGS__)
#define rWarning(fmt, ...) ::logMessage(ReplyMsgType::Warning, fmt, ## __VA_ARGS__)
#define rError(fmt, ...) ::logMessage(ReplyMsgType::Critical, fmt, ## __VA_ARGS__)

std::string sha256(const std::string &str);
void precise_nano_sleep(long sleep_ns);
std::string decompressBZ2(const std::string &in, std::atomic<bool> *abort = nullptr);
std::string decompressBZ2(const std::byte *in, size_t in_size, std::atomic<bool> *abort = nullptr);
std::string getUrlWithoutQuery(const std::string &url);
size_t getRemoteFileSize(const std::string &url, std::atomic<bool> *abort = nullptr);
std::string httpGet(const std::string &url, size_t chunk_size = 0, std::atomic<bool> *abort = nullptr);

typedef std::function<void(uint64_t cur, uint64_t total, bool success)> DownloadProgressHandler;
void installDownloadProgressHandler(DownloadProgressHandler);
bool httpDownload(const std::string &url, const std::string &file, size_t chunk_size = 0, std::atomic<bool> *abort = nullptr);
std::string formattedDataSize(size_t size);
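
// Hosts embedding the replay library can route its log output and download progress into
// their own UI; a small sketch (lambda contents are illustrative only):
//
//   installMessageHandler([](ReplyMsgType type, const std::string msg) {
//     /* forward msg to the host's logger */
//   });
//   installDownloadProgressHandler([](uint64_t cur, uint64_t total, bool success) {
//     /* update a progress bar with cur/total */
//   });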