VideoStream.cpp

#include "VideoStream.h"

#ifdef FFMPEG_ENABLED

#include <cstdio>
#include <cstdlib>
#include <cinttypes>   // PRId64 for printing packet timestamps

// Fall back to the legacy frame alloc/free API on libavcodec older than 55.28.1.
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(55, 28, 1)
#define av_frame_alloc avcodec_alloc_frame
#define av_frame_free avcodec_free_frame
#endif
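
// Sequence end code written once at the end of the stream (see the destructor).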
const uint8_t VideoStream::endcode[] = { 0, 0, 1, 0xb7 };
VideoStream::VideoStream(int width, int height, const std::string& filename) :
    width{ width & (~1) }, height{ height & (~1) }   // round down to even; YUV420P needs even dimensions
{
    // only needed with ffmpeg version < 4
    //avcodec_register_all();

    codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    if (!codec) {
        fprintf(stderr, "invalid codec\n");
        exit(1);
    }

    codecContext = avcodec_alloc_context3(codec);

    pkt = av_packet_alloc();
    if (!pkt)
        exit(1);
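
    // Encoder settings: 50 Mbit/s, 60 fps, a keyframe every five frames, YUV 4:2:0 input.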
    codecContext->bit_rate = 50 * 1000 * 1000;
    codecContext->width = this->width;     // use the even-rounded members, not the raw parameters
    codecContext->height = this->height;
    codecContext->time_base = AVRational{ 1, 60 };
    codecContext->framerate = AVRational{ 60, 1 };
    codecContext->gop_size = 5; /* emit one intra frame every five frames */
    codecContext->max_b_frames = 1;
    codecContext->pix_fmt = AV_PIX_FMT_YUV420P;

    if (codec->id == AV_CODEC_ID_H264)
        av_opt_set(codecContext->priv_data, "preset", "slow", 0);
    if (avcodec_open2(codecContext, codec, nullptr) < 0) {
        fprintf(stderr, "could not open codec\n");
        exit(1);
    }

    file = fopen(filename.c_str(), "wb");
    if (!file) {
        fprintf(stderr, "could not open %s\n", filename.c_str());
        exit(1);
    }
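
    // Allocate the reusable YUV frame the encoder reads from, and the RGB24 -> YUV420P converter.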
    picture = av_frame_alloc();
    picture->format = codecContext->pix_fmt;
    picture->width = codecContext->width;
    picture->height = codecContext->height;
    int retval = av_frame_get_buffer(picture, 0);
    if (retval < 0) {
        fprintf(stderr, "could not alloc the frame data\n");
        exit(1);
    }
    //av_image_alloc(picture->data, picture->linesize, width, height, codecContext->pix_fmt, 32);

    swsContext = sws_getContext(width, height, AV_PIX_FMT_RGB24,
                                width, height, AV_PIX_FMT_YUV420P,
                                0, nullptr, nullptr, nullptr);
}
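
// Send one frame (or nullptr to flush) to the encoder and write every packet it returns to the file.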
static void encode(AVCodecContext *enc_ctx, AVFrame *frame, AVPacket *pkt,
                   FILE *outfile)
{
    int ret;

    /* send the frame to the encoder */
    ret = avcodec_send_frame(enc_ctx, frame);
    if (ret < 0) {
        fprintf(stderr, "error sending a frame for encoding\n");
        exit(1);
    }

    while (ret >= 0) {
        ret = avcodec_receive_packet(enc_ctx, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return;
        else if (ret < 0) {
            fprintf(stderr, "error during encoding\n");
            exit(1);
        }

        printf("encoded frame %3" PRId64 " (size=%5d)\n", pkt->pts, pkt->size);
        fwrite(pkt->data, 1, pkt->size, outfile);
        av_packet_unref(pkt);
    }
}
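
// Tear-down: flush delayed packets, terminate the stream, and release FFmpeg resources.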
VideoStream::~VideoStream()
{
    /* flush the encoder */
    encode(codecContext, nullptr, pkt, file);

    /* add sequence end code to have a real MPEG file */
    fwrite(endcode, 1, sizeof(endcode), file);
    fclose(file);

    avcodec_free_context(&codecContext);
    av_frame_free(&picture);
    av_packet_free(&pkt);
    sws_freeContext(swsContext);   // release the scaler created in the constructor

    /*
    AVPacket pkt;
    av_init_packet(&pkt);
    pkt.data = nullptr;
    pkt.size = 0;
    for (;;) {
        avcodec_send_frame(codecContext, NULL);
        if (avcodec_receive_packet(codecContext, &pkt) == 0) {
            av_interleaved_write_frame(codecContext, &pkt);
            av_packet_unref(&pkt);
        }
        else {
            break;
        }
    }
    av_write_trailer();
    if (!(oformat->flags & AVFMT_NOFILE)) {
        int err = avio_close(ofctx->pb);
        if (err < 0) {
            Debug("Failed to close file", err);
        }
    }
    */
}
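
// Convert an RGB24 bitmap to YUV420P, stamp it with the next presentation timestamp, and encode it.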
void VideoStream::addFrame(const Bitmap<RGBColor>& frame)
{
    int retval = av_frame_make_writable(picture);
    if (retval < 0)
        exit(1);

    /* prepare a dummy image */
    /* Y */
    /*for(int y = 0; y < height; y++) {
        for(int x = 0; x < width; x++) {
            picture->data[0][y * picture->linesize[0] + x] = frame.get(x, y).r / 2;
        }
    }*/
    /* Cb and Cr */
    /*for(int y = 0; y < height / 2; y++) {
        for(int x = 0; x < width / 2; x++) {
            picture->data[1][y * picture->linesize[1] + x] = frame.get(x * 2, y * 2).g / 2;
            picture->data[2][y * picture->linesize[2] + x] = frame.get(x * 2, y * 2).b / 2;
        }
    }*/
    /*auto gammaCorrect = [] (const RGBColor& rgb) {
        const float gamma = 2.2f;
        return RGBColor {
            uint8_t(::powf(rgb.r / 255.0f, 1.0f / gamma) * 255),
            uint8_t(::powf(rgb.g / 255.0f, 1.0f / gamma) * 255),
            uint8_t(::powf(rgb.b / 255.0f, 1.0f / gamma) * 255),
        };
    };
    Bitmap<RGBColor> gammaCorrected = frame.map<RGBColor>(gammaCorrect);*/

    // Source is a single tightly packed RGB24 plane: one row pointer, one stride in bytes.
    const uint8_t* pixelPointer[] = { reinterpret_cast<const uint8_t*>(frame.pixels.get()), nullptr };
    const int linesizeIn[] = { int(frame.width * sizeof(RGBColor)) };
    sws_scale(swsContext, pixelPointer, linesizeIn, 0,
              frame.height, picture->data, picture->linesize);

    picture->pts = frameIndex++;

    /* encode the image */
    encode(codecContext, picture, pkt, file);
}
#endif // FFMPEG_ENABLED
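
// Minimal usage sketch (assumes a Bitmap<RGBColor> can be constructed and filled by the caller;
// that type's constructor is not shown in this file):
//
//     VideoStream stream{ 1280, 720, "out.h264" };   // raw H.264 elementary stream
//     Bitmap<RGBColor> frame{ 1280, 720 };            // hypothetical constructor
//     for (int i = 0; i < 600; ++i) {                 // 10 seconds at 60 fps
//         /* ...fill `frame`... */
//         stream.addFrame(frame);
//     }
//     // The destructor flushes the encoder and closes the file when `stream` goes out of scope.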