@@ -19,14 +19,15 @@
 #include "third_party/libyuv/include/libyuv.h"
 
 #import "components/capturer/RTCDesktopCapturer+Private.h"
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
 
 namespace webrtc {
 
 enum { kCaptureDelay = 33, kCaptureMessageId = 1000 };
 
 ObjCDesktopCapturer::ObjCDesktopCapturer(DesktopType type,
-    webrtc::DesktopCapturer::SourceId source_id,
-    id<RTC_OBJC_TYPE(DesktopCapturerDelegate)> delegate)
+                                         webrtc::DesktopCapturer::SourceId source_id,
+                                         id<RTC_OBJC_TYPE(DesktopCapturerDelegate)> delegate)
     : thread_(rtc::Thread::Create()), source_id_(source_id), delegate_(delegate) {
   RTC_DCHECK(thread_);
   type_ = type;
@@ -36,53 +37,47 @@
   options_.set_allow_iosurface(true);
   thread_->Invoke<void>(RTC_FROM_HERE, [this, type] {
     if (type == kScreen) {
-      capturer_ = std::make_unique<DesktopAndCursorComposer>(webrtc::DesktopCapturer::CreateScreenCapturer(options_), options_);
-    } else {
-      capturer_ = std::make_unique<DesktopAndCursorComposer>(webrtc::DesktopCapturer::CreateWindowCapturer(options_), options_);
+      capturer_ = std::make_unique<DesktopAndCursorComposer>(
+          webrtc::DesktopCapturer::CreateScreenCapturer(options_), options_);
+    } else {
+      capturer_ = std::make_unique<DesktopAndCursorComposer>(
+          webrtc::DesktopCapturer::CreateWindowCapturer(options_), options_);
     }
   });
 }
 
 ObjCDesktopCapturer::~ObjCDesktopCapturer() {
-  thread_->Invoke<void>(RTC_FROM_HERE, [this] {
-    capturer_.reset();
-  });
+  thread_->Invoke<void>(RTC_FROM_HERE, [this] { capturer_.reset(); });
 }
 
 ObjCDesktopCapturer::CaptureState ObjCDesktopCapturer::Start(uint32_t fps) {
-
-  if(fps == 0) {
-    capture_state_ = CS_FAILED;
-    return capture_state_;
+  if (fps == 0) {
+    capture_state_ = CS_FAILED;
+    return capture_state_;
   }
 
-  if(fps >= 60) {
+  if (fps >= 60) {
     capture_delay_ = uint32_t(1000.0 / 60.0);
   } else {
     capture_delay_ = uint32_t(1000.0 / fps);
   }
 
-  if(source_id_ != -1) {
-    if(!capturer_->SelectSource(source_id_)) {
-      capture_state_ = CS_FAILED;
-      return capture_state_;
+  if (source_id_ != -1) {
+    if (!capturer_->SelectSource(source_id_)) {
+      capture_state_ = CS_FAILED;
+      return capture_state_;
     }
-    if(type_ == kWindow) {
-      if(!capturer_->FocusOnSelectedSource()) {
+    if (type_ == kWindow) {
+      if (!capturer_->FocusOnSelectedSource()) {
         capture_state_ = CS_FAILED;
         return capture_state_;
       }
     }
   }
 
-  thread_->Invoke<void>(RTC_FROM_HERE, [this] {
-    capturer_->Start(this);
-  });
+  thread_->Invoke<void>(RTC_FROM_HERE, [this] { capturer_->Start(this); });
   capture_state_ = CS_RUNNING;
-  thread_->PostTask(ToQueuedTask(
-    [this]{
-      CaptureFrame();
-    }));
+  thread_->PostTask(ToQueuedTask([this] { CaptureFrame(); }));
   [delegate_ didSourceCaptureStart];
   return capture_state_;
 }
@@ -97,7 +92,7 @@
 }
 
 void ObjCDesktopCapturer::OnCaptureResult(webrtc::DesktopCapturer::Result result,
-    std::unique_ptr<webrtc::DesktopFrame> frame) {
+                                          std::unique_ptr<webrtc::DesktopFrame> frame) {
   if (result != result_) {
     if (result == webrtc::DesktopCapturer::Result::ERROR_PERMANENT) {
       [delegate_ didSourceCaptureError];
@@ -118,14 +113,14 @@
   }
 
   if (result == webrtc::DesktopCapturer::Result::ERROR_TEMPORARY) {
-      return;
+    return;
   }
 
   int width = frame->size().width();
   int height = frame->size().height();
   int real_width = width;
 
-  if(type_ == kWindow) {
+  if (type_ == kWindow) {
     int multiple = 0;
 #if defined(WEBRTC_ARCH_X86_FAMILY)
     multiple = 16;
@@ -134,46 +129,54 @@
 #endif
     // A multiple of $multiple must be used as the width of the src frame,
     // and the right black border needs to be cropped during conversion.
-    if( multiple != 0 && (width % multiple) != 0 ) {
+    if (multiple != 0 && (width % multiple) != 0) {
       width = (width / multiple + 1) * multiple;
     }
   }
-
-  if (!i420_buffer_ || !i420_buffer_.get() ||
-      i420_buffer_->width() * i420_buffer_->height() != real_width * height) {
-    i420_buffer_ = webrtc::I420Buffer::Create(real_width, height);
-  }
 
-  libyuv::ConvertToI420(frame->data(),
-                        0,
-                        i420_buffer_->MutableDataY(),
-                        i420_buffer_->StrideY(),
-                        i420_buffer_->MutableDataU(),
-                        i420_buffer_->StrideU(),
-                        i420_buffer_->MutableDataV(),
-                        i420_buffer_->StrideV(),
+  CVPixelBufferRef pixelBuffer = NULL;
+
+  NSDictionary *pixelAttributes = @{(NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{}};
+  CVReturn res = CVPixelBufferCreate(kCFAllocatorDefault,
+                                     width,
+                                     height,
+                                     kCVPixelFormatType_32BGRA,
+                                     (__bridge CFDictionaryRef)(pixelAttributes),
+                                     &pixelBuffer);
+  CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+  uint8_t *pxdata = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
+  libyuv::ConvertToARGB(reinterpret_cast<uint8_t *>(frame->data()),
+                        real_width * height * 4,
+                        reinterpret_cast<uint8_t *>(pxdata),
+                        width * 4,
                         0,
                         0,
                         width,
                         height,
                         real_width,
                         height,
                         libyuv::kRotate0,
                         libyuv::FOURCC_ARGB);
+  CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+
+  if (res != kCVReturnSuccess) {
+    NSLog(@"Unable to create cvpixelbuffer %d", res);
+    return;
+  }
+
+  RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer =
+      [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer];
   NSTimeInterval timeStampSeconds = CACurrentMediaTime();
   int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC);
-  RTCVideoFrame* rtc_video_frame =
-      ToObjCVideoFrame(
-          webrtc::VideoFrame::Builder()
-              .set_video_frame_buffer(i420_buffer_)
-              .set_rotation(webrtc::kVideoRotation_0)
-              .set_timestamp_us(timeStampNs / 1000)
-              .build()
-      );
-  [delegate_ didCaptureVideoFrame:rtc_video_frame];
+  RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame =
+      [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer
+                                                  rotation:RTCVideoRotation_0
+                                               timeStampNs:timeStampNs];
+
+  [delegate_ didCaptureVideoFrame:videoFrame];
 }
 
-void ObjCDesktopCapturer::OnMessage(rtc::Message* msg) {
+void ObjCDesktopCapturer::OnMessage(rtc::Message *msg) {
   if (msg->message_id == kCaptureMessageId) {
     CaptureFrame();
   }
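The window-capture branch above pads the source width up to a platform-specific multiple (16 on the x86 family) and crops the padded right-hand border away during conversion by passing real_width as the crop width. A small worked sketch of that rounding; RoundUpToMultiple is a hypothetical helper name, not a function in the patch.

// Round `width` up to the next multiple of `multiple`; a no-op when
// `multiple` is 0 or the width is already aligned.
int RoundUpToMultiple(int width, int multiple) {
  if (multiple == 0 || width % multiple == 0) {
    return width;
  }
  return (width / multiple + 1) * multiple;
}

// Example: a 1001-pixel-wide window with multiple = 16 (x86) is read as a
// 1008-pixel-wide source frame, and the 7 padded right-hand columns are
// dropped by using the original 1001-pixel width as the crop width.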