// rs-record-playback.cpp
// License: Apache 2.0. See LICENSE file in root directory.
// Copyright(c) 2017 Intel Corporation. All Rights Reserved.
#include <librealsense2/rs.hpp> // Include RealSense Cross Platform API
#include "example.hpp" // Include short list of convenience functions for rendering
#include <chrono>
#include <imgui.h>
#include "imgui_impl_glfw.h"
// Includes for time display
#include <sstream>
#include <iostream>
#include <iomanip>
#include <algorithm> // std::min / std::max used in draw_seek_bar
// Helper function for displaying time conveniently
std::string pretty_time(std::chrono::nanoseconds duration);
// Helper function for rendering a seek bar
void draw_seek_bar(rs2::playback& playback, int* seek_pos, float2& location, float width);
int main(int argc, char * argv[]) try
{
// Create a simple OpenGL window for rendering:
window app(1280, 720, "RealSense Record and Playback Example");
ImGui_ImplGlfw_Init(app, false);
// Booleans to control the GUI (recorded - enables the play button, recording - shows the 'recording to file' text)
bool recorded = false;
bool recording = false;
// Declare a texture for the depth image on the GPU
texture depth_image;
// Declare frameset and frames which will hold the data from the camera
rs2::frameset frames;
rs2::frame depth;
// Declare depth colorizer for pretty visualization of depth data
rs2::colorizer color_map;
// Create a shared pointer to a pipeline
auto pipe = std::make_shared<rs2::pipeline>();
// Start streaming with default configuration
pipe->start();
// Initialize a shared pointer to a device with the current device on the pipeline
rs2::device device = pipe->get_active_profile().get_device();
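// Throughout this example the same 'device' handle switches between three modes:
// a live device (default streaming), an rs2::recorder (while writing to a file) and
// an rs2::playback (while reading from a file). The as<T>() cast returns an object
// that evaluates to false when the device is not in that mode, which is how each
// GUI branch below is selected.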
// Create a variable to control the seek bar
int seek_pos = 0;
// While application is running
while (app)
{
// Flags for displaying ImGui window
static const int flags = ImGuiWindowFlags_NoCollapse
| ImGuiWindowFlags_NoScrollbar
| ImGuiWindowFlags_NoSavedSettings
| ImGuiWindowFlags_NoTitleBar
| ImGuiWindowFlags_NoResize
| ImGuiWindowFlags_NoMove;
ImGui_ImplGlfw_NewFrame(1);
ImGui::SetNextWindowSize({ app.width(), app.height() });
ImGui::Begin("app", nullptr, flags);
// If the device is streaming live and not from a file
if (!device.as<rs2::playback>())
{
frames = pipe->wait_for_frames(); // wait for next set of frames from the camera
depth = color_map.process(frames.get_depth_frame()); // Find and colorize the depth data
}
// Set options for the ImGui buttons
ImGui::PushStyleColor(ImGuiCol_TextSelectedBg, { 1, 1, 1, 1 });
ImGui::PushStyleColor(ImGuiCol_Button, { 36 / 255.f, 44 / 255.f, 51 / 255.f, 1 });
ImGui::PushStyleColor(ImGuiCol_ButtonHovered, { 40 / 255.f, 170 / 255.f, 90 / 255.f, 1 });
ImGui::PushStyleColor(ImGuiCol_ButtonActive, { 36 / 255.f, 44 / 255.f, 51 / 255.f, 1 });
ImGui::PushStyleVar(ImGuiStyleVar_FrameRounding, 12);
if (!device.as<rs2::playback>()) // Disable recording while device is playing
{
ImGui::SetCursorPos({ app.width() / 2 - 100, 3 * app.height() / 5 + 90});
ImGui::Text("Click 'record' to start recording");
ImGui::SetCursorPos({ app.width() / 2 - 100, 3 * app.height() / 5 + 110 });
if (ImGui::Button("record", { 50, 50 }))
{
// If it is the start of a new recording (device is not a recorder yet)
if (!device.as<rs2::recorder>())
{
pipe->stop(); // Stop the pipeline with the default configuration
pipe = std::make_shared<rs2::pipeline>();
rs2::config cfg; // Declare a new configuration
cfg.enable_record_to_file("a.bag");
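// Note: specific streams could also be enabled on this config before starting,
// e.g. cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
// this example simply records the default streams.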
pipe->start(cfg); //File will be opened at this point
device = pipe->get_active_profile().get_device();
}
else
{ // If the recording is resumed after a pause, there's no need to reset the shared pointer
device.as<rs2::recorder>().resume(); // rs2::recorder allows access to 'resume' function
}
recording = true;
}
// While recording is paused, the device is still an rs2::recorder and holds the file open,
// so the pause/stop buttons remain visible.
if (device.as<rs2::recorder>())
{
if (recording)
{
ImGui::SetCursorPos({ app.width() / 2 - 100, 3 * app.height() / 5 + 60 });
ImGui::TextColored({ 255 / 255.f, 64 / 255.f, 54 / 255.f, 1 }, "Recording to file 'a.bag'");
}
// Pause the recording if the button is clicked
ImGui::SetCursorPos({ app.width() / 2, 3 * app.height() / 5 + 110 });
if (ImGui::Button("pause\nrecord", { 50, 50 }))
{
device.as<rs2::recorder>().pause();
recording = false;
}
ImGui::SetCursorPos({ app.width() / 2 + 100, 3 * app.height() / 5 + 110 });
if (ImGui::Button(" stop\nrecord", { 50, 50 }))
{
pipe->stop(); // Stop the pipeline that holds the file and the recorder
pipe = std::make_shared<rs2::pipeline>(); //Reset the shared pointer with a new pipeline
pipe->start(); // Resume streaming with default configuration
device = pipe->get_active_profile().get_device();
recorded = true; // Now we can run the file
recording = false;
}
}
}
// After a recording is done, we can play it
if (recorded) {
ImGui::SetCursorPos({ app.width() / 2 - 100, 4 * app.height() / 5 + 30 });
ImGui::Text("Click 'play' to start playing");
ImGui::SetCursorPos({ app.width() / 2 - 100, 4 * app.height() / 5 + 50});
if (ImGui::Button("play", { 50, 50 }))
{
if (!device.as<rs2::playback>())
{
pipe->stop(); // Stop streaming with default configuration
pipe = std::make_shared<rs2::pipeline>();
rs2::config cfg;
cfg.enable_device_from_file("a.bag");
pipe->start(cfg); //File will be opened in read mode at this point
device = pipe->get_active_profile().get_device();
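// If dropping frames to match the original capture rate is not desired, non-real-time
// playback could be requested here, e.g. device.as<rs2::playback>().set_real_time(false)
// (not used in this example).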
}
else
{
device.as<rs2::playback>().resume();
}
}
}
// If device is playing a recording, we allow pause and stop
if (device.as<rs2::playback>())
{
rs2::playback playback = device.as<rs2::playback>();
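// poll_for_frames() is non-blocking, unlike wait_for_frames(), so the GUI (and the
// pause/stop buttons) stays responsive even when playback is paused and no new frames arrive.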
if (pipe->poll_for_frames(&frames)) // Check if new frames are ready
{
depth = color_map.process(frames.get_depth_frame()); // Find and colorize the depth data for rendering
}
// Render a seek bar for the player
float2 location = { app.width() / 4, 4 * app.height() / 5 + 110 };
draw_seek_bar(playback, &seek_pos, location, app.width() / 2);
ImGui::SetCursorPos({ app.width() / 2, 4 * app.height() / 5 + 50 });
if (ImGui::Button(" pause\nplaying", { 50, 50 }))
{
playback.pause();
}
ImGui::SetCursorPos({ app.width() / 2 + 100, 4 * app.height() / 5 + 50 });
if (ImGui::Button(" stop\nplaying", { 50, 50 }))
{
pipe->stop();
pipe = std::make_shared<rs2::pipeline>();
pipe->start();
device = pipe->get_active_profile().get_device();
}
}
ImGui::PopStyleColor(4);
ImGui::PopStyleVar();
ImGui::End();
ImGui::Render();
// Render depth frames from the default configuration, the recorder or the playback
depth_image.render(depth, { app.width() * 0.25f, app.height() * 0.25f, app.width() * 0.5f, app.height() * 0.75f });
}
return EXIT_SUCCESS;
}
catch (const rs2::error & e)
{
std::cout << "RealSense error calling " << e.get_failed_function() << "(" << e.get_failed_args() << "):\n " << e.what() << std::endl;
return EXIT_FAILURE;
}
catch (const std::exception& e)
{
std::cerr << e.what() << std::endl;
return EXIT_FAILURE;
}
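// Converts a nanosecond duration into an "h:mm:ss" string for the seek bar label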
std::string pretty_time(std::chrono::nanoseconds duration)
{
using namespace std::chrono;
auto hhh = duration_cast<hours>(duration);
duration -= hhh;
auto mm = duration_cast<minutes>(duration);
duration -= mm;
auto ss = duration_cast<seconds>(duration);
duration -= ss;
auto ms = duration_cast<milliseconds>(duration); // Computed but not shown in the h:mm:ss output below
std::ostringstream stream;
stream << std::setfill('0') << std::setw(hhh.count() >= 10 ? 2 : 1) << hhh.count() << ':' <<
std::setfill('0') << std::setw(2) << mm.count() << ':' <<
std::setfill('0') << std::setw(2) << ss.count();
return stream.str();
}
void draw_seek_bar(rs2::playback& playback, int* seek_pos, float2& location, float width)
{
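// get_duration() reports the total length of the recording and get_position() the current
// offset, both in nanoseconds; their ratio is mapped to a 0-100 value for the slider.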
int64_t playback_total_duration = playback.get_duration().count();
auto progress = playback.get_position();
double part = (1.0 * progress) / playback_total_duration;
*seek_pos = static_cast<int>(std::max(0.0, std::min(part, 1.0)) * 100);
auto playback_status = playback.current_status();
ImGui::PushItemWidth(width);
ImGui::SetCursorPos({ location.x, location.y });
ImGui::PushStyleVar(ImGuiStyleVar_FrameRounding, 12);
if (ImGui::SliderInt("##seek bar", seek_pos, 0, 100, "", true))
{
//Seek was dragged
if (playback_status != RS2_PLAYBACK_STATUS_STOPPED) //Ignore seek when playback is stopped
{
auto duration_db = std::chrono::duration_cast<std::chrono::duration<double, std::nano>>(playback.get_duration());
auto single_percent = duration_db.count() / 100;
auto seek_time = std::chrono::duration<double, std::nano>((*seek_pos) * single_percent);
playback.seek(std::chrono::duration_cast<std::chrono::nanoseconds>(seek_time));
}
}
std::string time_elapsed = pretty_time(std::chrono::nanoseconds(progress));
ImGui::SetCursorPos({ location.x + width + 10, location.y });
ImGui::Text("%s", time_elapsed.c_str());
ImGui::PopStyleVar();
ImGui::PopItemWidth();
}