Android - Rotate video frames before sending to Wowza Streaming Engine using WebRTC

I want to stream video from an Android camera to Wowza Streaming Engine (WSE) using WebRTC. Everything works well when the device is in landscape mode. Then I tried streaming with the device in portrait mode.

The first thing I noticed in the WSE player was that the video stream had been rotated 90 degrees counter-clockwise. I found that WebRTC does not rotate the video frames coming from the onPreviewFrame API before sending them to WSE, and unfortunately WSE does not (at least so far) provide any mechanism for rotating video frames on their side.

So I looked into the WebRTC Android native source code and modified it to rotate each video frame before sending it to WSE. Now I can see the video stream in portrait mode in the WSE player.

But there is a problem: the video stream sometimes looks strange. See the images below.

A normal image

A weird image

The camera is in a fixed position. The WSE player shows the first image at first, but sometimes the second one appears.

Here is the file in the WebRTC source code that I changed: ~/webrtc/src/sdk/android/src/jni/androidvideotracksource.cc

void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
                                                        int length,
                                                        int width,
                                                        int height,
                                                        VideoRotation rotation,
                                                        int64_t timestamp_ns) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());

  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
  int64_t translated_camera_time_us =
      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;

  if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
                  &adapted_height, &crop_width, &crop_height, &crop_x,
                  &crop_y)) {
    return;
  }

  const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
  const uint8_t* uv_plane = y_plane + width * height;
  const int uv_width = (width + 1) / 2;

  RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));

  // Can only crop at even pixels.
  crop_x &= ~1;
  crop_y &= ~1;
  // Crop just by modifying pointers.
  y_plane += width * crop_y + crop_x;
  uv_plane += uv_width * crop_y + crop_x;

  rtc::scoped_refptr<I420Buffer> buffer =
      buffer_pool_.CreateBuffer(adapted_width, adapted_height);

  nv12toi420_scaler_.NV12ToI420Scale(
      y_plane, width, uv_plane, uv_width * 2, crop_width, crop_height,
      buffer->MutableDataY(), buffer->StrideY(),
      // Swap U and V, since we have NV21, not NV12.
      buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
      buffer->StrideU(), buffer->width(), buffer->height());

  // TODO: Rotate I420 frame 90 degrees clockwise.
  rtc::scoped_refptr<I420Buffer> rotated_buffer =
      I420Buffer::Rotate(*buffer, kVideoRotation_90);

  OnFrame(VideoFrame(rotated_buffer, rotation, translated_camera_time_us));
}

I added this statement to rotate each I420 frame 90 degrees clockwise:

// TODO: Rotate I420 frame 90 degrees clockwise.
  rtc::scoped_refptr<I420Buffer> rotated_buffer =
      I420Buffer::Rotate(*buffer, kVideoRotation_90);
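
For context, I420Buffer::Rotate returns a new buffer, and for 90- and 270-degree rotations the output has its width and height swapped relative to the input. A minimal sketch illustrating this (the RTC_DCHECK_EQ lines are only for illustration, not part of the original code):

rtc::scoped_refptr<I420Buffer> rotated =
    I420Buffer::Rotate(*buffer, kVideoRotation_90);
// For a 90-degree rotation the output buffer has swapped dimensions.
RTC_DCHECK_EQ(rotated->width(), buffer->height());
RTC_DCHECK_EQ(rotated->height(), buffer->width());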

Any help would be greatly appreciated!

I finally figured out a way to solve this problem. Here are my steps:

Step 1: Make sure the streaming activity is locked to portrait, e.g. by setting android:screenOrientation="portrait" on the activity in AndroidManifest.xml.

Step 2: Change the following method in the WebRTC Android native source code, in ~/webrtc/src/sdk/android/src/jni/androidvideotracksource.cc

Original version:

void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
                                                        int length,
                                                        int width,
                                                        int height,
                                                        VideoRotation rotation,
                                                        int64_t timestamp_ns) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());

  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
  int64_t translated_camera_time_us =
      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;

  if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
                  &adapted_height, &crop_width, &crop_height, &crop_x,
                  &crop_y)) {
    return;
  }

  const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
  const uint8_t* uv_plane = y_plane + width * height;
  const int uv_width = (width + 1) / 2;

  RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));

  // Can only crop at even pixels.
  crop_x &= ~1;
  crop_y &= ~1;
  // Crop just by modifying pointers.
  y_plane += width * crop_y + crop_x;
  uv_plane += uv_width * crop_y + crop_x;

  rtc::scoped_refptr<I420Buffer> buffer =
      buffer_pool_.CreateBuffer(adapted_width, adapted_height);

  nv12toi420_scaler_.NV12ToI420Scale(
      y_plane, width, uv_plane, uv_width * 2, crop_width, crop_height,
      buffer->MutableDataY(), buffer->StrideY(),
      // Swap U and V, since we have NV21, not NV12.
      buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
      buffer->StrideU(), buffer->width(), buffer->height());

  OnFrame(VideoFrame(buffer, rotation, translated_camera_time_us));
}

Modified version:

void AndroidVideoTrackSource::OnByteBufferFrameCaptured(const void* frame_data,
                                                        int length,
                                                        int width,
                                                        int height,
                                                        VideoRotation rotation,
                                                        int64_t timestamp_ns) {
  RTC_DCHECK(camera_thread_checker_.CalledOnValidThread());

  int64_t camera_time_us = timestamp_ns / rtc::kNumNanosecsPerMicrosec;
  int64_t translated_camera_time_us =
      timestamp_aligner_.TranslateTimestamp(camera_time_us, rtc::TimeMicros());

  int adapted_width;
  int adapted_height;
  int crop_width;
  int crop_height;
  int crop_x;
  int crop_y;

  if (!AdaptFrame(width, height, camera_time_us, &adapted_width,
                  &adapted_height, &crop_width, &crop_height, &crop_x,
                  &crop_y)) {
    return;
  }

  const uint8_t* y_plane = static_cast<const uint8_t*>(frame_data);
  const uint8_t* uv_plane = y_plane + width * height;
  const int uv_width = (width + 1) / 2;

  RTC_CHECK_GE(length, width * height + 2 * uv_width * ((height + 1) / 2));

  // Can only crop at even pixels.
  crop_x &= ~1;
  crop_y &= ~1;
  // Crop just by modifying pointers.
  y_plane += width * crop_y + crop_x;
  uv_plane += uv_width * crop_y + crop_x;

  rtc::scoped_refptr<I420Buffer> buffer =
      buffer_pool_.CreateBuffer(adapted_width, adapted_height);

  nv12toi420_scaler_.NV12ToI420Scale(
      y_plane, width, uv_plane, uv_width * 2, crop_width, crop_height,
      buffer->MutableDataY(), buffer->StrideY(),
      // Swap U and V, since we have NV21, not NV12.
      buffer->MutableDataV(), buffer->StrideV(), buffer->MutableDataU(),
      buffer->StrideU(), buffer->width(), buffer->height());

  // The original OnFrame call is commented out and replaced by the
  // custom code below.
  // OnFrame(VideoFrame(buffer, rotation, translated_camera_time_us));

  // Custom code: rotate the video frame before passing it on to the
  // next layers of WebRTC.

  // Rotate the I420 frame by `rotation` degrees; the value is 90 or 270
  // depending on the camera orientation.
  rtc::scoped_refptr<I420Buffer> rotated_buffer =
      I420Buffer::Rotate(*buffer, rotation);

  // Make sure the I420 frame has a valid size in portrait mode (the
  // rotated frame has swapped dimensions, so the target is height x width).
  rtc::scoped_refptr<I420Buffer> final_buffer =
      buffer_pool_.CreateBuffer(height, width);
  final_buffer->ScaleFrom(*rotated_buffer);

  // After rotating the I420 frame, reset the rotation to 0 so that the
  // next layers of WebRTC do not rotate the frame again.
  rotation = kVideoRotation_0;

  // Pass processed frame to the next layers.
  OnFrame(VideoFrame(final_buffer, rotation, translated_camera_time_us));
}
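
The essential change is the rotate / scale / reset-rotation sequence at the end. As a minimal sketch (my own refactoring, not part of WebRTC; RotateAndNormalize is a hypothetical helper name), the same logic could be factored out like this:

// Hypothetical helper (not part of WebRTC): rotate an I420 frame and
// scale it into a pooled buffer of the desired output size.
rtc::scoped_refptr<I420Buffer> RotateAndNormalize(
    const rtc::scoped_refptr<I420Buffer>& buffer,
    VideoRotation rotation,
    I420BufferPool& pool,
    int target_width,
    int target_height) {
  if (rotation == kVideoRotation_0) {
    return buffer;  // Landscape frames need no rotation.
  }
  // For 90/270 rotations the rotated buffer has swapped dimensions.
  rtc::scoped_refptr<I420Buffer> rotated =
      I420Buffer::Rotate(*buffer, rotation);
  rtc::scoped_refptr<I420Buffer> final_buffer =
      pool.CreateBuffer(target_width, target_height);
  final_buffer->ScaleFrom(*rotated);
  return final_buffer;
}

With such a helper, the tail of OnByteBufferFrameCaptured would reduce to calling RotateAndNormalize(buffer, rotation, buffer_pool_, height, width) and passing the result to OnFrame with kVideoRotation_0. Resetting the rotation matters: once the pixels have been rotated, leaving a non-zero rotation value would let later layers rotate the frame a second time, which likely explains the intermittently distorted frames in the question.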

Now my stream displays perfectly in the Wowza Streaming Engine player.