Recording audio and video messages
Switching between audio and video recording modes
```swift
self.actionButtons.micButton.switchMode = { [weak self] in
    if let strongSelf = self, let interfaceInteraction = strongSelf.interfaceInteraction {
        interfaceInteraction.switchMediaRecordingMode()
    }
}
```
```swift
let actionButtons: ChatTextInputActionButtonsNode
```
```swift
let micButton: ChatTextInputMediaRecordingButton
```
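switchMediaRecordingMode simply flips the mediaRecordingMode stored in the chat interface state between .audio and .video, which is the same value the beginRecording handler switches on further below. A minimal sketch of that toggle; the enum name and surrounding state here are illustrative, not taken from the Telegram source:

```swift
// Illustrative model of the recording-mode toggle; names are not from the Telegram source.
enum MediaRecordingMode {
    case audio
    case video

    // Switching modes is just flipping between the two cases.
    mutating func toggle() {
        self = (self == .audio) ? .video : .audio
    }
}

var mediaRecordingMode: MediaRecordingMode = .audio
mediaRecordingMode.toggle()   // now .video — the next press starts a round video message
```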
```swift
var videoRecordingStatus: InstantVideoControllerRecordingStatus? {
    didSet {
        if self.videoRecordingStatus !== oldValue {
            if self.micLevelDisposable == nil {
                micLevelDisposable = MetaDisposable()
            }
            if let videoRecordingStatus = self.videoRecordingStatus {
                self.micLevelDisposable?.set(videoRecordingStatus.micLevel.start(next: { [weak self] level in
                    Queue.mainQueue().async {
                        //self?.recordingOverlay?.addImmediateMicLevel(CGFloat(level))
                        self?.addMicLevel(CGFloat(level))
                    }
                }))
            } else if self.audioRecorder == nil {
                self.micLevelDisposable?.set(nil)
            }
            self.hasRecorder = self.audioRecorder != nil || self.videoRecordingStatus != nil
        }
    }
}
```
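The micLevel signal delivers a loudness value on every audio callback, and addMicLevel presumably feeds it into the pulsing record indicator. A minimal sketch of such a consumer, assuming a simple low-pass filter to keep the animation from jittering (names and factors are illustrative, not the actual Telegram implementation):

```swift
import UIKit

// Illustrative mic-level consumer: smooths raw levels before driving an animation.
final class MicLevelMeter {
    private var smoothedLevel: CGFloat = 0.0
    private let smoothing: CGFloat = 0.2   // 0 = frozen, 1 = no smoothing

    // Called on the main queue for every new level, mirroring addMicLevel(_:).
    func addMicLevel(_ level: CGFloat) {
        smoothedLevel += (level - smoothedLevel) * smoothing
        updateIndicatorScale(1.0 + smoothedLevel * 0.5)
    }

    private func updateIndicatorScale(_ scale: CGFloat) {
        // Drive whatever view represents the recording indicator.
    }
}
```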
```swift
self.actionButtons.micButton.beginRecording = { [weak self] in
    if let strongSelf = self, let presentationInterfaceState = strongSelf.presentationInterfaceState, let interfaceInteraction = strongSelf.interfaceInteraction {
        let isVideo: Bool
        switch presentationInterfaceState.interfaceState.mediaRecordingMode {
        case .audio:
            isVideo = false
        case .video:
            isVideo = true
        }
        interfaceInteraction.beginMediaRecording(isVideo)
    }
}
```
```swift
beginMediaRecording: { [weak self] isVideo in
    guard let strongSelf = self else {
        return
    }
    strongSelf.mediaRecordingModeTooltipController?.dismiss()
    let requestId = strongSelf.beginMediaRecordingRequestId
    let begin: () -> Void = {
        guard let strongSelf = self, strongSelf.beginMediaRecordingRequestId == requestId else {
            return
        }
        guard checkAvailableDiskSpace(context: strongSelf.context, push: { [weak self] c in
            self?.push(c)
        }) else {
            return
        }
        let hasOngoingCall: Signal<Bool, NoError> = strongSelf.context.sharedContext.hasOngoingCall.get()
        let _ = (hasOngoingCall
        |> deliverOnMainQueue).start(next: { hasOngoingCall in
            guard let strongSelf = self, strongSelf.beginMediaRecordingRequestId == requestId else {
                return
            }
            if hasOngoingCall {
                strongSelf.present(textAlertController(context: strongSelf.context, title: strongSelf.presentationData.strings.Call_CallInProgressTitle, text: strongSelf.presentationData.strings.Call_RecordingDisabledMessage, actions: [TextAlertAction(type: .defaultAction, title: strongSelf.presentationData.strings.Common_OK, action: {
                })]), in: .window(.root))
            } else {
                if isVideo {
                    strongSelf.requestVideoRecorder()
                } else {
                    strongSelf.requestAudioRecorder(beginWithTone: false)
                }
            }
        })
    }
    DeviceAccess.authorizeAccess(to: .microphone(isVideo ? .video : .audio), presentationData: strongSelf.presentationData, present: { c, a in
        self?.present(c, in: .window(.root), with: a)
    }, openSettings: {
        self?.context.sharedContext.applicationBindings.openSettings()
    }, { granted in
        guard let strongSelf = self, granted else {
            return
        }
        if isVideo {
            DeviceAccess.authorizeAccess(to: .camera(.video), presentationData: strongSelf.presentationData, present: { c, a in
                self?.present(c, in: .window(.root), with: a)
            }, openSettings: {
                self?.context.sharedContext.applicationBindings.openSettings()
            }, { granted in
                if granted {
                    begin()
                }
            })
        } else {
            begin()
        }
    })
}
```
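The flow above is: check available disk space, check for an ongoing call, then chain the microphone permission and, for video, the camera permission before invoking `begin`. The same mic-then-camera chaining can be expressed with plain AVFoundation; this is only a sketch of the pattern, not Telegram's DeviceAccess helper:

```swift
import AVFoundation
import Foundation

/// Requests microphone access, then camera access when recording video,
/// and only calls `begin` once everything needed was granted.
func authorizeRecording(isVideo: Bool, begin: @escaping () -> Void) {
    AVCaptureDevice.requestAccess(for: .audio) { micGranted in
        guard micGranted else { return }
        guard isVideo else {
            DispatchQueue.main.async { begin() }
            return
        }
        AVCaptureDevice.requestAccess(for: .video) { camGranted in
            guard camGranted else { return }
            DispatchQueue.main.async { begin() }
        }
    }
}
```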
```swift
private func requestVideoRecorder() {
    let peerId = self.chatLocation.peerId
    if self.videoRecorderValue == nil {
        if let currentInputPanelFrame = self.chatDisplayNode.currentInputPanelFrame() {
            if self.recorderFeedback == nil {
                self.recorderFeedback = HapticFeedback()
                self.recorderFeedback?.prepareImpact(.light)
            }
            var isScheduledMessages = false
            if case .scheduledMessages = self.presentationInterfaceState.subject {
                isScheduledMessages = true
            }
            self.videoRecorder.set(.single(legacyInstantVideoController(theme: self.presentationData.theme, panelFrame: self.view.convert(currentInputPanelFrame, to: nil), context: self.context, peerId: peerId, slowmodeState: !isScheduledMessages ? self.presentationInterfaceState.slowmodeState : nil, hasSchedule: !isScheduledMessages && peerId.namespace != Namespaces.Peer.SecretChat, send: { [weak self] videoController, message in
                if let strongSelf = self {
                    guard let message = message else {
                        strongSelf.videoRecorder.set(.single(nil))
                        return
                    }
                    let replyMessageId = strongSelf.presentationInterfaceState.interfaceState.replyMessageId
                    let correlationId = Int64.random(in: 0 ..< Int64.max)
                    let updatedMessage = message
                        .withUpdatedReplyToMessageId(replyMessageId)
                        .withUpdatedCorrelationId(correlationId)
                    var usedCorrelationId = false
                    if strongSelf.chatDisplayNode.shouldAnimateMessageTransition, let extractedView = videoController.extractVideoSnapshot() {
                        usedCorrelationId = true
                        strongSelf.chatDisplayNode.messageTransitionNode.add(correlationId: correlationId, source: .videoMessage(ChatMessageTransitionNode.Source.VideoMessage(view: extractedView)), initiated: { [weak videoController] in
                            videoController?.hideVideoSnapshot()
                            guard let strongSelf = self else {
                                return
                            }
                            strongSelf.videoRecorder.set(.single(nil))
                        })
                    } else {
                        strongSelf.videoRecorder.set(.single(nil))
                    }
                    strongSelf.chatDisplayNode.setupSendActionOnViewUpdate({
                        if let strongSelf = self {
                            strongSelf.updateChatPresentationInterfaceState(animated: true, interactive: false, {
                                $0.updatedInterfaceState { $0.withUpdatedReplyMessageId(nil) }
                            })
                        }
                    }, usedCorrelationId ? correlationId : nil)
                    strongSelf.sendMessages([updatedMessage])
                }
            }, displaySlowmodeTooltip: { [weak self] node, rect in
                self?.interfaceInteraction?.displaySlowmodeTooltip(node, rect)
            }, presentSchedulePicker: { [weak self] done in
                if let strongSelf = self {
                    strongSelf.presentScheduleTimePicker(completion: { [weak self] time in
                        if let strongSelf = self {
                            done(time)
                            if strongSelf.presentationInterfaceState.subject != .scheduledMessages && time != scheduleWhenOnlineTimestamp {
                                strongSelf.openScheduledMessages()
                            }
                        }
                    })
                }
            })))
        }
    }
}
```
```swift
private var videoRecorder = Promise<InstantVideoController?>()
```
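The recorder controller lives behind a Promise of an optional controller: requestVideoRecorder sets it to a fresh controller, and the send handler above sets it back to `.single(nil)` to dismiss the recorder once the message has been handed off. A simplified, purely illustrative stand-in for that "latest optional controller" idea, without the SwiftSignalKit machinery:

```swift
// Illustrative stand-in for the Promise<InstantVideoController?> pattern:
// whoever observes the value dismisses the old controller and presents the new one.
final class LatestController<Controller: AnyObject> {
    private(set) var value: Controller?
    var onChange: ((_ old: Controller?, _ new: Controller?) -> Void)?

    func set(_ newValue: Controller?) {
        let old = value
        value = newValue
        onChange?(old, newValue)   // e.g. dismiss `old`, present `newValue`; nil tears down
    }
}
```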
```swift
func legacyInstantVideoController(theme: PresentationTheme, panelFrame: CGRect, context: AccountContext, peerId: PeerId, slowmodeState: ChatSlowmodeState?, hasSchedule: Bool, send: @escaping (InstantVideoController, EnqueueMessage?) -> Void, displaySlowmodeTooltip: @escaping (ASDisplayNode, CGRect) -> Void, presentSchedulePicker: @escaping (@escaping (Int32) -> Void) -> Void) -> InstantVideoController
```
TGVideoMessageCaptureController
```objc
TGVideoMessageControls *_controlsView;
TGVideoCameraGLView *_previewView;
TGVideoMessageRingView *_ringView;
UIView *_blurView;
UIView *_wrapperView;
TGModernButton *_switchButton;
TGVideoMessageShimmerView *_shimmerView;
```
TGVideoCameraPipeline
```objc
@interface TGVideoCameraPipeline : NSObject

@property (nonatomic, assign) AVCaptureVideoOrientation orientation;
@property (nonatomic, assign) bool renderingEnabled;
@property (nonatomic, readonly) NSTimeInterval videoDuration;
@property (nonatomic, readonly) CGAffineTransform videoTransform;
@property (nonatomic, readonly) bool isRecording;
@property (nonatomic, copy) void (^micLevel)(CGFloat);
@property (nonatomic, readonly) bool isZoomAvailable;
@property (nonatomic, assign) CGFloat zoomLevel;

- (void)cancelZoom;

- (instancetype)initWithDelegate:(id<TGVideoCameraPipelineDelegate>)delegate position:(AVCaptureDevicePosition)position callbackQueue:(dispatch_queue_t)queue liveUploadInterface:(id<TGLiveUploadInterface>)liveUploadInterface;

- (void)startRunning;
- (void)stopRunning;

- (void)startRecording:(NSURL *)url preset:(TGMediaVideoConversionPreset)preset liveUpload:(bool)liveUpload;
- (void)stopRecording:(void (^)(bool))completed;

- (CGAffineTransform)transformForOrientation:(AVCaptureVideoOrientation)orientation;

- (void)setCameraPosition:(AVCaptureDevicePosition)position;
+ (bool)cameraPositionChangeAvailable;

@end
```
```objc
TGVideoCameraGLRenderer *_renderer;
TGVideoCameraMovieRecorder *_recorder;
__weak id<TGVideoCameraPipelineDelegate> _delegate;
TGMediaVideoConversionPreset _preset;
id<TGLiveUploadInterface> _watcher;
CVPixelBufferRef _currentPreviewPixelBuffer;
NSMutableDictionary *_thumbnails;
NSTimeInterval _firstThumbnailTime;
NSTimeInterval _previousThumbnailTime;
id<TGLiveUploadInterface> _liveUploadInterface;
int16_t _micLevelPeak;
bool _running;
bool _startCaptureSessionOnEnteringForeground;
id _applicationWillEnterForegroundObserver;
```
TGVideoCameraPipelineDelegate
```objc
@protocol TGVideoCameraPipelineDelegate <NSObject>
@required

- (void)capturePipeline:(TGVideoCameraPipeline *)capturePipeline didStopRunningWithError:(NSError *)error;
- (void)capturePipeline:(TGVideoCameraPipeline *)capturePipeline previewPixelBufferReadyForDisplay:(CVPixelBufferRef)previewPixelBuffer;
- (void)capturePipelineDidRunOutOfPreviewBuffers:(TGVideoCameraPipeline *)capturePipeline;

- (void)capturePipelineRecordingDidStart:(TGVideoCameraPipeline *)capturePipeline;
- (void)capturePipeline:(TGVideoCameraPipeline *)capturePipeline recordingDidFailWithError:(NSError *)error;
- (void)capturePipelineRecordingWillStop:(TGVideoCameraPipeline *)capturePipeline;
- (void)capturePipelineRecordingDidStop:(TGVideoCameraPipeline *)capturePipeline duration:(NSTimeInterval)duration liveUploadData:(id)liveUploadData thumbnailImage:(UIImage *)thumbnailImage thumbnails:(NSDictionary *)thumbnails;

@end
```
TGLiveUploadInterface
```objc
@protocol TGLiveUploadInterface <NSObject>

- (void)setupWithFileURL:(NSURL *)fileURL;
- (id)fileUpdated:(bool)completed;

@end
```
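TGLiveUploadInterface lets the pipeline hand the growing movie file to an uploader while recording is still in progress: setupWithFileURL: points the uploader at the file, and fileUpdated: is called as new bytes are written (with completed == true at the end); the value it returns presumably ends up as the liveUploadData passed to the delegate when recording stops. A hedged Swift sketch of a conforming watcher, with illustrative names:

```swift
import Foundation

// Illustrative mirror of TGLiveUploadInterface: upload the partial file as it grows.
protocol LiveUploadInterface {
    func setup(fileURL: URL)
    func fileUpdated(_ completed: Bool) -> Any?
}

final class LiveUploadWatcher: LiveUploadInterface {
    private var fileURL: URL?
    private var uploadedBytes: UInt64 = 0

    func setup(fileURL: URL) {
        self.fileURL = fileURL
        self.uploadedBytes = 0
    }

    func fileUpdated(_ completed: Bool) -> Any? {
        guard let url = fileURL,
              let attributes = try? FileManager.default.attributesOfItem(atPath: url.path),
              let size = (attributes[.size] as? NSNumber)?.uint64Value else {
            return nil
        }
        if size > uploadedBytes || completed {
            // Upload the byte range [uploadedBytes, size) here, so the upload is
            // mostly done by the time recording stops.
            uploadedBytes = size
        }
        // In the real interface the return value is an opaque handle (liveUploadData).
        return completed ? url : nil
    }
}
```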
TGVideoCameraGLRenderer: the class that actually performs the rendering
```objc
@interface TGVideoCameraGLRenderer : NSObject

@property (nonatomic, readonly) __attribute__((NSObject)) CMFormatDescriptionRef outputFormatDescription;
@property (nonatomic, assign) AVCaptureVideoOrientation orientation;
@property (nonatomic, assign) bool mirror;
@property (nonatomic, assign) CGFloat opacity;
@property (nonatomic, readonly) bool hasPreviousPixelbuffer;

- (void)prepareForInputWithFormatDescription:(CMFormatDescriptionRef)inputFormatDescription outputRetainedBufferCountHint:(size_t)outputRetainedBufferCountHint;
- (void)reset;
- (CVPixelBufferRef)copyRenderedPixelBuffer:(CVPixelBufferRef)pixelBuffer;
- (void)setPreviousPixelBuffer:(CVPixelBufferRef)previousPixelBuffer;

@end
```
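copyRenderedPixelBuffer: takes each captured frame and returns a new pixel buffer with the mirror and opacity (pause-crossfade) effects applied; the real renderer does this with OpenGL ES. As a rough illustration of those responsibilities only, here is a hedged Core Image sketch, not the actual implementation:

```swift
import CoreImage
import CoreVideo
import CoreGraphics

// Illustrative stand-in for TGVideoCameraGLRenderer's responsibilities:
// mirror the front-camera frame and crossfade it over a frozen previous frame.
final class SimpleFrameRenderer {
    private let context = CIContext()
    var mirror = false
    var opacity: CGFloat = 1.0
    var previousImage: CIImage?   // set when recording resumes from a pause

    func copyRenderedPixelBuffer(_ source: CVPixelBuffer) -> CVPixelBuffer? {
        var image = CIImage(cvPixelBuffer: source)
        if mirror {
            // Horizontal flip within the frame's own bounds.
            image = image.transformed(by: CGAffineTransform(scaleX: -1, y: 1)
                .translatedBy(x: -image.extent.width, y: 0))
        }
        if let previous = previousImage, opacity < 1.0 {
            // Fade the live frame in over the held frame (the pause crossfade).
            image = image.applyingFilter("CIColorMatrix", parameters: [
                "inputAVector": CIVector(x: 0, y: 0, z: 0, w: opacity)
            ]).composited(over: previous)
        }

        var output: CVPixelBuffer?
        CVPixelBufferCreate(kCFAllocatorDefault,
                            CVPixelBufferGetWidth(source),
                            CVPixelBufferGetHeight(source),
                            kCVPixelFormatType_32BGRA,
                            nil, &output)
        guard let rendered = output else { return nil }
        context.render(image, to: rendered)
        return rendered
    }
}
```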
TGVideoCameraPipeline - setupCaptureSession
```objc
- (void)setupCaptureSession
{
    if (_captureSession != nil)
        return;

    _captureSession = [[AVCaptureSession alloc] init];
    _captureSession.automaticallyConfiguresApplicationAudioSession = false;
    _captureSession.usesApplicationAudioSession = true;

    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(captureSessionNotification:) name:nil object:_captureSession];
    _applicationWillEnterForegroundObserver = [[NSNotificationCenter defaultCenter] addObserverForName:UIApplicationWillEnterForegroundNotification object:[[LegacyComponentsGlobals provider] applicationInstance] queue:nil usingBlock:^(__unused NSNotification *note)
    {
        [self applicationWillEnterForeground];
    }];

    _audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    _audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:_audioDevice error:nil];
    if ([_captureSession canAddInput:_audioInput])
        [_captureSession addInput:_audioInput];

    _audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    _audioDataOutputQueue = dispatch_queue_create("org.telegram.VideoCameraPipeline.audio", DISPATCH_QUEUE_SERIAL);
    [_audioOutput setSampleBufferDelegate:self queue:_audioDataOutputQueue];
    if ([_captureSession canAddOutput:_audioOutput])
        [_captureSession addOutput:_audioOutput];
    _audioConnection = [_audioOutput connectionWithMediaType:AVMediaTypeAudio];

    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
    {
        if (device.position == _preferredPosition)
        {
            videoDevice = device;
            break;
        }
    }

    _renderer.mirror = (videoDevice.position == AVCaptureDevicePositionFront);
    _renderer.orientation = _orientation;

    NSError *videoDeviceError = nil;
    _videoInput = [[AVCaptureDeviceInput alloc] initWithDevice:videoDevice error:&videoDeviceError];
    if ([_captureSession canAddInput:_videoInput])
    {
        [_captureSession addInput:_videoInput];
        _videoDevice = videoDevice;
    }
    else
    {
        [self handleNonRecoverableCaptureSessionRuntimeError:videoDeviceError];
        return;
    }

    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    _videoOutput.alwaysDiscardsLateVideoFrames = false;
    _videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
    [_videoOutput setSampleBufferDelegate:self queue:_videoDataOutputQueue];
    if ([_captureSession canAddOutput:_videoOutput])
        [_captureSession addOutput:_videoOutput];
    _videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo];

    if ([_captureSession canSetSessionPreset:AVCaptureSessionPreset640x480])
        _captureSession.sessionPreset = AVCaptureSessionPreset640x480;
    else
        _captureSession.sessionPreset = AVCaptureSessionPresetMedium;

    [self _configureFPS];
    [self _enableLowLightBoost];
    [self _enableVideoStabilization];

    _videoBufferOrientation = _videoConnection.videoOrientation;
}
```
The end result is that _videoConnection and _audioConnection are created; the captured audio and video data are then delivered through these connections in the sample buffer delegate method:
```objc
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection;
```
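The same session layout can be written more compactly in Swift. This is only a sketch of the configuration above (640x480 preset, BGRA video frames, a serial queue per data output), not code from the Telegram project:

```swift
import AVFoundation

// Sketch of the capture graph that setupCaptureSession builds, in Swift.
func makeCaptureSession(delegate: AVCaptureVideoDataOutputSampleBufferDelegate & AVCaptureAudioDataOutputSampleBufferDelegate) -> AVCaptureSession? {
    let session = AVCaptureSession()
    session.sessionPreset = session.canSetSessionPreset(.vga640x480) ? .vga640x480 : .medium

    guard
        let audioDevice = AVCaptureDevice.default(for: .audio),
        let videoDevice = AVCaptureDevice.default(for: .video),
        let audioInput = try? AVCaptureDeviceInput(device: audioDevice),
        let videoInput = try? AVCaptureDeviceInput(device: videoDevice)
    else { return nil }

    if session.canAddInput(audioInput) { session.addInput(audioInput) }
    if session.canAddInput(videoInput) { session.addInput(videoInput) }

    let audioOutput = AVCaptureAudioDataOutput()
    audioOutput.setSampleBufferDelegate(delegate, queue: DispatchQueue(label: "camera.audio"))
    if session.canAddOutput(audioOutput) { session.addOutput(audioOutput) }

    let videoOutput = AVCaptureVideoDataOutput()
    videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
    videoOutput.setSampleBufferDelegate(delegate, queue: DispatchQueue(label: "camera.video"))
    if session.canAddOutput(videoOutput) { session.addOutput(videoOutput) }

    return session
}
```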
TGVideoCameraPipeline - renderVideoSampleBuffer
```objc
- (void)renderVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVPixelBufferRef renderedPixelBuffer = NULL;
    CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

    @synchronized (_renderer)
    {
        if (_renderingEnabled)
        {
            bool repeatingFrames = false;
            @synchronized (self)
            {
                if (_recorder.paused && _previousPixelBuffer != NULL)
                {
                    _recorder.paused = false;
                    _repeatingCount = 11;

                    [_renderer setPreviousPixelBuffer:_previousPixelBuffer];
                    CFRelease(_previousPixelBuffer);
                    _previousPixelBuffer = NULL;
                }

                if (_repeatingCount > 0)
                {
                    repeatingFrames = true;
                    _repeatingCount--;
                }

                CGFloat opacity = 1.0f;
                if (_repeatingCount < 10)
                    opacity = _repeatingCount / 9.0f;

                [_renderer setOpacity:opacity];

                if (_repeatingCount == 0)
                    [_renderer setPreviousPixelBuffer:NULL];
            }

            CVPixelBufferRef sourcePixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
            renderedPixelBuffer = [_renderer copyRenderedPixelBuffer:sourcePixelBuffer];

            @synchronized (self)
            {
                if (_recordingStatus == TGVideoCameraRecordingStatusRecording && _recordingThumbnail == nil)
                {
                    UIImage *image = [self imageFromImageBuffer:sourcePixelBuffer];
                    _recordingThumbnail = image;
                }

                if (_recordingStatus == TGVideoCameraRecordingStatusRecording && !repeatingFrames)
                {
                    NSTimeInterval currentTime = CMTimeGetSeconds(timestamp);
                    if (_previousThumbnailTime < DBL_EPSILON)
                    {
                        _firstThumbnailTime = currentTime;
                        _previousThumbnailTime = currentTime;
                        [self storeThumbnailWithSampleBuffer:sampleBuffer time:0.0 mirror:_renderer.mirror];
                    }
                    else
                    {
                        NSTimeInterval relativeThumbnailTime = _previousThumbnailTime - _firstThumbnailTime;
                        NSTimeInterval interval = MAX(0.1, relativeThumbnailTime / 10.0);
                        if (currentTime - _previousThumbnailTime >= interval)
                        {
                            [self storeThumbnailWithSampleBuffer:sampleBuffer time:relativeThumbnailTime mirror:_renderer.mirror];
                            _previousThumbnailTime = currentTime;
                        }
                    }
                }

                if (!repeatingFrames)
                {
                    if (_previousPixelBuffer != NULL)
                    {
                        CFRelease(_previousPixelBuffer);
                        _previousPixelBuffer = NULL;
                    }

                    _previousPixelBuffer = sourcePixelBuffer;
                    CFRetain(sourcePixelBuffer);
                }
            }
        }
        else
        {
            return;
        }
    }

    if (renderedPixelBuffer)
    {
        @synchronized (self)
        {
            [self outputPreviewPixelBuffer:renderedPixelBuffer];

            if (_recordingStatus == TGVideoCameraRecordingStatusRecording)
                [_recorder appendVideoPixelBuffer:renderedPixelBuffer withPresentationTime:timestamp];
        }

        CFRelease(renderedPixelBuffer);
    }
    else
    {
        [self videoPipelineDidRunOutOfBuffers];
    }
}
```
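Two details are worth calling out. First, the `_repeatingCount`/opacity dance crossfades the frozen last frame back into live video when recording resumes from a pause. Second, the thumbnail logic keeps roughly ten evenly spread thumbnails regardless of how long the recording gets, because the sampling interval grows with the elapsed time (`interval = max(0.1, elapsed / 10)`). A small Swift sketch of that second rule, with made-up names, just to show its behavior:

```swift
import Foundation

// Decides whether a new thumbnail should be stored at `currentTime`.
// The interval stretches as the recording grows, so about 10 thumbnails are kept overall.
struct ThumbnailSampler {
    private(set) var firstTime: TimeInterval = 0
    private(set) var previousTime: TimeInterval = 0

    mutating func shouldStoreThumbnail(at currentTime: TimeInterval) -> Bool {
        if previousTime < .ulpOfOne {
            firstTime = currentTime
            previousTime = currentTime
            return true                        // always keep the very first frame
        }
        let elapsed = previousTime - firstTime
        let interval = max(0.1, elapsed / 10.0)
        if currentTime - previousTime >= interval {
            previousTime = currentTime
            return true
        }
        return false
    }
}
```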
TGVideoCameraPipeline - imageFromImageBuffer
```objc
- (UIImage *)imageFromImageBuffer:(CVPixelBufferRef)imageBuffer
{
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef cgImage = CGBitmapContextCreateImage(context);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    UIImage *image = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);

    return image;
}
```
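Because the pipeline requests kCVPixelFormatType_32BGRA frames, the thumbnail conversion can wrap the buffer's base address directly in a little-endian BGRA bitmap context. A hedged Swift equivalent of the same conversion:

```swift
import UIKit
import CoreVideo

// Sketch of converting a 32BGRA CVPixelBuffer into a UIImage via CGBitmapContext.
func image(from pixelBuffer: CVPixelBuffer) -> UIImage? {
    CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
    defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) }

    guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else { return nil }
    let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
    let width = CVPixelBufferGetWidth(pixelBuffer)
    let height = CVPixelBufferGetHeight(pixelBuffer)

    // BGRA, little-endian, premultiplied alpha in the first component.
    let bitmapInfo = CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.premultipliedFirst.rawValue
    guard
        let context = CGContext(data: baseAddress,
                                width: width,
                                height: height,
                                bitsPerComponent: 8,
                                bytesPerRow: bytesPerRow,
                                space: CGColorSpaceCreateDeviceRGB(),
                                bitmapInfo: bitmapInfo),
        let cgImage = context.makeImage()
    else { return nil }

    return UIImage(cgImage: cgImage)
}
```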
TGVideoCameraMovieRecorder: responsible for writing the data to disk
```objc
@interface TGVideoCameraMovieRecorder : NSObject

@property (nonatomic, assign) bool paused;

- (instancetype)initWithURL:(NSURL *)URL delegate:(id<TGVideoCameraMovieRecorderDelegate>)delegate callbackQueue:(dispatch_queue_t)queue;

- (void)addVideoTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription transform:(CGAffineTransform)transform settings:(NSDictionary *)videoSettings;
- (void)addAudioTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription settings:(NSDictionary *)audioSettings;

- (void)prepareToRecord;

- (void)appendVideoPixelBuffer:(CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime;
- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;

- (void)finishRecording:(void(^)())completed;

- (NSTimeInterval)videoDuration;

@end
```
TGVideoCameraMovieRecorder - prepareToRecord
```objc
- (void)prepareToRecord
{
    @synchronized (self)
    {
        if (_status != TGMovieRecorderStatusIdle)
            return;

        [self transitionToStatus:TGMovieRecorderStatusPreparingToRecord error:nil completed:nil];
    }

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^
    {
        @autoreleasepool
        {
            NSError *error = nil;
            [[NSFileManager defaultManager] removeItemAtURL:_url error:NULL];
            _assetWriter = [[AVAssetWriter alloc] initWithURL:_url fileType:AVFileTypeMPEG4 error:&error];

            bool succeed = false;
            if (error == nil && _videoTrackSourceFormatDescription)
            {
                succeed = [self setupAssetWriterVideoInputWithSourceFormatDescription:_videoTrackSourceFormatDescription transform:_videoTrackTransform settings:_videoTrackSettings];
            }

            if (error == nil && succeed && _audioTrackSourceFormatDescription)
            {
                succeed = [self setupAssetWriterAudioInputWithSourceFormatDescription:_audioTrackSourceFormatDescription settings:_audioTrackSettings];
            }

            if (error == nil && succeed)
            {
                if (![_assetWriter startWriting])
                    error = _assetWriter.error;
            }

            @synchronized (self)
            {
                if (error || !succeed)
                    [self transitionToStatus:TGMovieRecorderStatusFailed error:error completed:nil];
                else
                    [self transitionToStatus:TGMovieRecorderStatusRecording error:nil completed:nil];
            }
        }
    });
}
```
```objc
_assetWriter = [[AVAssetWriter alloc] initWithURL:_url fileType:AVFileTypeMPEG4 error:&error];
```
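The recorder is built around a plain AVAssetWriter writing an MPEG-4 file: video frames arrive as pixel buffers (appendVideoPixelBuffer:withPresentationTime:) and audio as sample buffers. A hedged Swift sketch of a writer with the same shape; settings and dimensions are illustrative, not Telegram's:

```swift
import AVFoundation

// Minimal sketch of an AVAssetWriter-based movie recorder; names and settings are illustrative.
final class MovieRecorder {
    private let writer: AVAssetWriter
    private let videoInput: AVAssetWriterInput
    private let audioInput: AVAssetWriterInput
    private let pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor
    private var sessionStarted = false

    init(url: URL) throws {
        writer = try AVAssetWriter(outputURL: url, fileType: .mp4)

        videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: 240,
            AVVideoHeightKey: 240,
        ])
        videoInput.expectsMediaDataInRealTime = true

        audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVNumberOfChannelsKey: 1,
            AVSampleRateKey: 44100,
        ])
        audioInput.expectsMediaDataInRealTime = true

        pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoInput, sourcePixelBufferAttributes: nil)

        writer.add(videoInput)
        writer.add(audioInput)
        _ = writer.startWriting()
    }

    // Mirrors appendVideoPixelBuffer:withPresentationTime:.
    func appendVideo(_ pixelBuffer: CVPixelBuffer, at time: CMTime) {
        if !sessionStarted {
            writer.startSession(atSourceTime: time)
            sessionStarted = true
        }
        if videoInput.isReadyForMoreMediaData {
            _ = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: time)
        }
    }

    func finish(_ completion: @escaping () -> Void) {
        videoInput.markAsFinished()
        audioInput.markAsFinished()
        writer.finishWriting(completionHandler: completion)
    }
}
```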
TGVideoMessageCaptureController
```objc
@property (nonatomic, copy) id (^requestActivityHolder)();
@property (nonatomic, copy) void (^micLevel)(CGFloat level);
@property (nonatomic, copy) void (^onDuration)(NSTimeInterval duration);
@property (nonatomic, copy) void (^finishedWithVideo)(NSURL *videoURL, UIImage *previewImage, NSUInteger fileSize, NSTimeInterval duration, CGSize dimensions, id liveUploadData, TGVideoEditAdjustments *adjustments, bool, int32_t);
@property (nonatomic, copy) void (^onDismiss)(bool isAuto, bool isCancelled);
@property (nonatomic, copy) void (^onStop)(void);
@property (nonatomic, copy) void (^onCancel)(void);
@property (nonatomic, copy) void (^didDismiss)(void);
@property (nonatomic, copy) void (^displaySlowmodeTooltip)(void);
@property (nonatomic, copy) void (^presentScheduleController)(void (^)(int32_t));
```