  1. // -*- mode: c++; c-basic-offset: 2; indent-tabs-mode: nil; -*-
  2. //
  3. // Quick hack based on ffmpeg
  4. // tutorial http://dranger.com/ffmpeg/tutorial01.html
  5. // in turn based on a tutorial by
  6. // Martin Bohme (boehme@inb.uni-luebeckREMOVETHIS.de)
  7. //
  8. // HELP NEEDED
  9. // Note, this is known to not be optimal, causing flicker etc. It is at this
  10. // point merely a demonstration of what is possible. It also serves as a
  11. // converter to a 'stream' (-O option) which then can be played quickly with
  12. // the led-image-viewer.
  13. //
  14. // Pull requests are welcome to address
  15. // * Use hardware acceleration if possible. The Pi does have some
  16. // acceleration features IIRC, so if we could use these, that would be
  17. // great.
  18. // * Other improvements that could reduce the flicker on a Raspberry Pi.
  19. // Currently it seems to create flicker in particular when decoding larger
  20. // videos due to memory bandwidth overload (?). Might already be fixed
  21. // with using hardware acceleration.
  22. // * Add sound ? Right now, we don't decode the sound. It is usually
  23. // not very useful as the builtin-sound is disabled when running the
  24. // LED matrix, but if there is an external USB sound adapter, it might
  25. // be nice.
  26. // Ancient AV versions forgot to set this.
  27. #define __STDC_CONSTANT_MACROS
  28. // libav: "U NO extern C in header ?"
  29. extern "C" {
  30. # include <libavcodec/avcodec.h>
  31. # include <libavformat/avformat.h>
  32. # include <libavutil/imgutils.h>
  33. # include <libswscale/swscale.h>
  34. }
  35. #include <fcntl.h>
  36. #include <getopt.h>
  37. #include <limits.h>
  38. #include <signal.h>
  39. #include <stdio.h>
  40. #include <sys/stat.h>
  41. #include <sys/types.h>
  42. #include <time.h>
  43. #include <unistd.h>
  44. #include "led-matrix.h"
  45. #include "content-streamer.h"
  46. using rgb_matrix::FrameCanvas;
  47. using rgb_matrix::RGBMatrix;
  48. using rgb_matrix::StreamWriter;
  49. using rgb_matrix::StreamIO;
// Set from the signal handler on SIGINT/SIGTERM so every decode/playback
// loop below can notice and exit cleanly.
// NOTE(review): a plain `volatile bool` is this project's existing
// convention; strictly, `volatile sig_atomic_t` (or an atomic) is what the
// standard guarantees for access from a signal handler -- confirm before
// changing.
volatile bool interrupt_received = false;
static void InterruptHandler(int) {
  interrupt_received = true;
}
// One packed 8-bit-per-channel RGB pixel. The member order matches the byte
// layout of an AV_PIX_FMT_RGB24 row, so CopyFrame() can walk a frame row
// through a LedPixel pointer.
struct LedPixel {
  uint8_t r, g, b;
};
  57. void CopyFrame(AVFrame *pFrame, FrameCanvas *canvas,
  58. int offset_x, int offset_y,
  59. int width, int height) {
  60. for (int y = 0; y < height; ++y) {
  61. LedPixel *pix = (LedPixel*) (pFrame->data[0] + y*pFrame->linesize[0]);
  62. for (int x = 0; x < width; ++x, ++pix) {
  63. canvas->SetPixel(x + offset_x, y + offset_y, pix->r, pix->g, pix->b);
  64. }
  65. }
  66. }
  67. // Scale "width" and "height" to fit within target rectangle of given size.
  68. void ScaleToFitKeepAscpet(int fit_in_width, int fit_in_height,
  69. int *width, int *height) {
  70. if (*height < fit_in_height && *width < fit_in_width) return; // Done.
  71. const float height_ratio = 1.0 * (*height) / fit_in_height;
  72. const float width_ratio = 1.0 * (*width) / fit_in_width;
  73. const float ratio = (height_ratio > width_ratio) ? height_ratio : width_ratio;
  74. *width = roundf(*width / ratio);
  75. *height = roundf(*height / ratio);
  76. }
// Print an optional error message followed by the full usage/help text to
// stderr, including the shared LED-matrix flag documentation.
// Always returns 1 so callers can write `return usage(argv[0]);` directly.
static int usage(const char *progname, const char *msg = NULL) {
  if (msg) {
    fprintf(stderr, "%s\n", msg);
  }
  fprintf(stderr, "Show one or a sequence of video files on the RGB-Matrix\n");
  fprintf(stderr, "usage: %s [options] <video> [<video>...]\n", progname);
  fprintf(stderr, "Options:\n"
          "\t-F : Full screen without black bars; aspect ratio might suffer\n"
          "\t-O<streamfile> : Output to stream-file instead of matrix (don't need to be root).\n"
          "\t-s <count> : Skip these number of frames in the beginning.\n"
          "\t-c <count> : Only show this number of frames (excluding skipped frames).\n"
          "\t-V<vsync-multiple> : Instead of native video framerate, playback framerate\n"
          "\t is a fraction of matrix refresh. In particular with a stable refresh,\n"
          "\t this can result in more smooth playback. Choose multiple for desired framerate.\n"
          "\t (Tip: use --led-limit-refresh for stable rate)\n"
          "\t-v : verbose; prints video metadata and other info.\n"
          "\t-f : Loop forever.\n");
  fprintf(stderr, "\nGeneral LED matrix options:\n");
  rgb_matrix::PrintMatrixFlags(stderr);
  return 1;
}
  98. static void add_nanos(struct timespec *accumulator, long nanoseconds) {
  99. accumulator->tv_nsec += nanoseconds;
  100. while (accumulator->tv_nsec > 1000000000) {
  101. accumulator->tv_nsec -= 1000000000;
  102. accumulator->tv_sec += 1;
  103. }
  104. }
  105. // Convert deprecated color formats to new and manually set the color range.
  106. // YUV has funny ranges (16-235), while the YUVJ are 0-255. SWS prefers to
  107. // deal with the YUV range, but then requires to set the output range.
  108. // https://libav.org/documentation/doxygen/master/pixfmt_8h.html#a9a8e335cf3be472042bc9f0cf80cd4c5
  109. SwsContext *CreateSWSContext(const AVCodecContext *codec_ctx,
  110. int display_width, int display_height) {
  111. AVPixelFormat pix_fmt;
  112. bool src_range_extended_yuvj = true;
  113. // Remap deprecated to new pixel format.
  114. switch (codec_ctx->pix_fmt) {
  115. case AV_PIX_FMT_YUVJ420P: pix_fmt = AV_PIX_FMT_YUV420P; break;
  116. case AV_PIX_FMT_YUVJ422P: pix_fmt = AV_PIX_FMT_YUV422P; break;
  117. case AV_PIX_FMT_YUVJ444P: pix_fmt = AV_PIX_FMT_YUV444P; break;
  118. case AV_PIX_FMT_YUVJ440P: pix_fmt = AV_PIX_FMT_YUV440P; break;
  119. default:
  120. src_range_extended_yuvj = false;
  121. pix_fmt = codec_ctx->pix_fmt;
  122. }
  123. SwsContext *swsCtx = sws_getContext(codec_ctx->width, codec_ctx->height,
  124. pix_fmt,
  125. display_width, display_height,
  126. AV_PIX_FMT_RGB24, SWS_BILINEAR,
  127. NULL, NULL, NULL);
  128. if (src_range_extended_yuvj) {
  129. // Manually set the source range to be extended. Read modify write.
  130. int dontcare[4];
  131. int src_range, dst_range;
  132. int brightness, contrast, saturation;
  133. sws_getColorspaceDetails(swsCtx, (int**)&dontcare, &src_range,
  134. (int**)&dontcare, &dst_range, &brightness,
  135. &contrast, &saturation);
  136. const int* coefs = sws_getCoefficients(SWS_CS_DEFAULT);
  137. src_range = 1; // New src range.
  138. sws_setColorspaceDetails(swsCtx, coefs, src_range, coefs, dst_range,
  139. brightness, contrast, saturation);
  140. }
  141. return swsCtx;
  142. }
  143. int main(int argc, char *argv[]) {
  144. RGBMatrix::Options matrix_options;
  145. rgb_matrix::RuntimeOptions runtime_opt;
  146. if (!rgb_matrix::ParseOptionsFromFlags(&argc, &argv,
  147. &matrix_options, &runtime_opt)) {
  148. return usage(argv[0]);
  149. }
  150. int vsync_multiple = 1;
  151. bool use_vsync_for_frame_timing = false;
  152. bool maintain_aspect_ratio = true;
  153. bool verbose = false;
  154. bool forever = false;
  155. int stream_output_fd = -1;
  156. unsigned int frame_skip = 0;
  157. unsigned int framecount_limit = UINT_MAX; // even at 60fps, that is > 2yrs
  158. int opt;
  159. while ((opt = getopt(argc, argv, "vO:R:Lfc:s:FV:")) != -1) {
  160. switch (opt) {
  161. case 'v':
  162. verbose = true;
  163. break;
  164. case 'f':
  165. forever = true;
  166. break;
  167. case 'O':
  168. stream_output_fd = open(optarg, O_CREAT|O_TRUNC|O_WRONLY, 0644);
  169. if (stream_output_fd < 0) {
  170. perror("Couldn't open output stream");
  171. return 1;
  172. }
  173. break;
  174. case 'L':
  175. fprintf(stderr, "-L is deprecated. Use\n\t--led-pixel-mapper=\"U-mapper\" --led-chain=4\ninstead.\n");
  176. return 1;
  177. break;
  178. case 'R':
  179. fprintf(stderr, "-R is deprecated. "
  180. "Use --led-pixel-mapper=\"Rotate:%s\" instead.\n", optarg);
  181. return 1;
  182. break;
  183. case 'c':
  184. framecount_limit = atoi(optarg);
  185. break;
  186. case 's':
  187. frame_skip = atoi(optarg);
  188. break;
  189. case 'F':
  190. maintain_aspect_ratio = false;
  191. break;
  192. case 'V':
  193. vsync_multiple = atoi(optarg);
  194. if (vsync_multiple <= 0)
  195. return usage(argv[0],
  196. "-V: VSync-multiple needs to be a positive integer");
  197. use_vsync_for_frame_timing = true;
  198. break;
  199. default:
  200. return usage(argv[0]);
  201. }
  202. }
  203. if (optind >= argc) {
  204. fprintf(stderr, "Expected video filename.\n");
  205. return usage(argv[0]);
  206. }
  207. const bool multiple_videos = (argc > optind + 1);
  208. // We want to have the matrix start unless we actually write to a stream.
  209. runtime_opt.do_gpio_init = (stream_output_fd < 0);
  210. RGBMatrix *matrix = RGBMatrix::CreateFromOptions(matrix_options, runtime_opt);
  211. if (matrix == NULL) {
  212. return 1;
  213. }
  214. FrameCanvas *offscreen_canvas = matrix->CreateFrameCanvas();
  215. long frame_count = 0;
  216. StreamIO *stream_io = NULL;
  217. StreamWriter *stream_writer = NULL;
  218. if (stream_output_fd >= 0) {
  219. stream_io = new rgb_matrix::FileStreamIO(stream_output_fd);
  220. stream_writer = new StreamWriter(stream_io);
  221. if (forever) {
  222. fprintf(stderr, "-f (forever) doesn't make sense with -O; disabling\n");
  223. forever = false;
  224. }
  225. }
  226. // If we only have to loop a single video, we can avoid doing the
  227. // expensive video stream set-up and just repeat in an inner loop.
  228. const bool one_video_forever = forever && !multiple_videos;
  229. const bool multiple_video_forever = forever && multiple_videos;
  230. #if LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(58, 9, 100)
  231. av_register_all();
  232. #endif
  233. avformat_network_init();
  234. signal(SIGTERM, InterruptHandler);
  235. signal(SIGINT, InterruptHandler);
  236. do {
  237. for (int m = optind; m < argc && !interrupt_received; ++m) {
  238. const char *movie_file = argv[m];
  239. if (strcmp(movie_file, "-") == 0) {
  240. movie_file = "/dev/stdin";
  241. }
  242. AVFormatContext *format_context = avformat_alloc_context();
  243. if (avformat_open_input(&format_context, movie_file, NULL, NULL) != 0) {
  244. perror("Issue opening file: ");
  245. return -1;
  246. }
  247. if (avformat_find_stream_info(format_context, NULL) < 0) {
  248. fprintf(stderr, "Couldn't find stream information\n");
  249. return -1;
  250. }
  251. if (verbose) av_dump_format(format_context, 0, movie_file, 0);
  252. // Find the first video stream
  253. int videoStream = -1;
  254. AVCodecParameters *codec_parameters = NULL;
  255. AVCodec *av_codec = NULL;
  256. for (int i = 0; i < (int)format_context->nb_streams; ++i) {
  257. codec_parameters = format_context->streams[i]->codecpar;
  258. av_codec = avcodec_find_decoder(codec_parameters->codec_id);
  259. if (!av_codec) continue;
  260. if (codec_parameters->codec_type == AVMEDIA_TYPE_VIDEO) {
  261. videoStream = i;
  262. break;
  263. }
  264. }
  265. if (videoStream == -1)
  266. return false;
  267. // Frames per second; calculate wait time between frames.
  268. AVStream *const stream = format_context->streams[videoStream];
  269. AVRational rate = av_guess_frame_rate(format_context, stream, NULL);
  270. const long frame_wait_nanos = 1e9 * rate.den / rate.num;
  271. if (verbose) fprintf(stderr, "FPS: %f\n", 1.0*rate.num / rate.den);
  272. AVCodecContext *codec_context = avcodec_alloc_context3(av_codec);
  273. if (avcodec_parameters_to_context(codec_context, codec_parameters) < 0)
  274. return -1;
  275. if (avcodec_open2(codec_context, av_codec, NULL) < 0)
  276. return -1;
  277. /*
  278. * Prepare frame to hold the scaled target frame to be send to matrix.
  279. */
  280. int display_width = codec_context->width;
  281. int display_height = codec_context->height;
  282. if (maintain_aspect_ratio) {
  283. display_width = codec_context->width;
  284. display_height = codec_context->height;
  285. // Make display fit within canvas.
  286. ScaleToFitKeepAscpet(matrix->width(), matrix->height(),
  287. &display_width, &display_height);
  288. } else {
  289. display_width = matrix->width();
  290. display_height = matrix->height();
  291. }
  292. // Letterbox or pillarbox black bars.
  293. const int display_offset_x = (matrix->width() - display_width)/2;
  294. const int display_offset_y = (matrix->height() - display_height)/2;
  295. // The output_frame_ will receive the scaled result.
  296. AVFrame *output_frame = av_frame_alloc();
  297. if (av_image_alloc(output_frame->data, output_frame->linesize,
  298. display_width, display_height, AV_PIX_FMT_RGB24,
  299. 64) < 0) {
  300. return -1;
  301. }
  302. if (verbose) {
  303. fprintf(stderr, "Scaling %dx%d -> %dx%d; black border x:%d y:%d\n",
  304. codec_context->width, codec_context->height,
  305. display_width, display_height,
  306. display_offset_x, display_offset_y);
  307. }
  308. // initialize SWS context for software scaling
  309. SwsContext *const sws_ctx = CreateSWSContext(
  310. codec_context, display_width, display_height);
  311. if (!sws_ctx) {
  312. fprintf(stderr, "Trouble doing scaling to %dx%d :(\n",
  313. matrix->width(), matrix->height());
  314. return 1;
  315. }
  316. struct timespec next_frame;
  317. AVPacket *packet = av_packet_alloc();
  318. AVFrame *decode_frame = av_frame_alloc(); // Decode video into this
  319. do {
  320. unsigned int frames_left = framecount_limit;
  321. unsigned int frames_to_skip = frame_skip;
  322. if (one_video_forever) {
  323. av_seek_frame(format_context, videoStream, 0, AVSEEK_FLAG_ANY);
  324. avcodec_flush_buffers(codec_context);
  325. }
  326. clock_gettime(CLOCK_MONOTONIC, &next_frame);
  327. while (!interrupt_received && av_read_frame(format_context, packet) >= 0
  328. && frames_left > 0) {
  329. // Is this a packet from the video stream?
  330. if (packet->stream_index == videoStream) {
  331. // Determine absolute end of this frame now so that we don't include
  332. // decoding overhead. TODO: skip frames if getting too slow ?
  333. add_nanos(&next_frame, frame_wait_nanos);
  334. // Decode video frame
  335. if (avcodec_send_packet(codec_context, packet) < 0)
  336. continue;
  337. if (avcodec_receive_frame(codec_context, decode_frame) < 0)
  338. continue;
  339. if (frames_to_skip) { frames_to_skip--; continue; }
  340. // Convert the image from its native format to RGB
  341. sws_scale(sws_ctx, (uint8_t const * const *)decode_frame->data,
  342. decode_frame->linesize, 0, codec_context->height,
  343. output_frame->data, output_frame->linesize);
  344. CopyFrame(output_frame, offscreen_canvas,
  345. display_offset_x, display_offset_y,
  346. display_width, display_height);
  347. frame_count++;
  348. frames_left--;
  349. if (stream_writer) {
  350. if (verbose) fprintf(stderr, "%6ld", frame_count);
  351. stream_writer->Stream(*offscreen_canvas, frame_wait_nanos/1000);
  352. } else {
  353. offscreen_canvas = matrix->SwapOnVSync(offscreen_canvas,
  354. vsync_multiple);
  355. }
  356. }
  357. if (!stream_writer && !use_vsync_for_frame_timing) {
  358. clock_nanosleep(CLOCK_MONOTONIC, TIMER_ABSTIME, &next_frame, NULL);
  359. }
  360. av_packet_unref(packet);
  361. }
  362. } while (one_video_forever && !interrupt_received);
  363. av_packet_free(&packet);
  364. av_frame_free(&output_frame);
  365. av_frame_free(&decode_frame);
  366. avcodec_close(codec_context);
  367. avformat_close_input(&format_context);
  368. }
  369. } while (multiple_video_forever && !interrupt_received);
  370. if (interrupt_received) {
  371. // Feedback for Ctrl-C, but most importantly, force a newline
  372. // at the output, so that commandline-shell editing is not messed up.
  373. fprintf(stderr, "Got interrupt. Exiting\n");
  374. }
  375. delete matrix;
  376. delete stream_writer;
  377. delete stream_io;
  378. fprintf(stderr, "Total of %ld frames decoded\n", frame_count);
  379. return 0;
  380. }