提问人:user1115985 提问时间:10/19/2023 最后编辑:HangarRash、user1115985 更新时间:10/20/2023 访问量:52
iOS 17 RPSystemBroadcastPickerView 不工作
iOS 17 RPSystemBroadcastPickerView not working
问:
我现有的代码在 iOS < 17 设备中正常工作,它会同时录制 iPhone 屏幕和音频,但在 iOS 17 设备中,屏幕录制视频仅捕获 2 秒,然后自动停止,因为它是一个扩展,我没有日志来调试问题。
我已经在其他 iPhone 和少于 17 的操作系统中测试了相同的代码,它工作正常,但在 iOS 17 设备中,这个问题即将到来。
@try {
    NSLog(@"initAssetWriter");

    // Create the asset writer for the broadcast output file. The original code
    // never inspected `error`; log it so a setup failure is visible — inside a
    // broadcast extension there is no attached debugger.
    NSError *error = nil;
    CGRect screenRect = [[UIScreen mainScreen] bounds];
    _videoWriter = [[AVAssetWriter alloc] initWithURL:_filePath
                                             fileType:AVFileTypeMPEG4
                                                error:&error];
    NSParameterAssert(_videoWriter);
    if (_videoWriter == nil) {
        NSLog(@"AVAssetWriter creation failed: %@", error);
    }

    // -- Video input ----------------------------------------------------------
    // NOTE(review): width/height are screen *points* multiplied by 4, which on
    // modern devices can exceed the hardware H.264 encoder's maximum frame
    // dimensions — a plausible cause of the writer failing a moment after it
    // starts on iOS 17. Confirm the intended output resolution.
    NSDictionary *videoCompressionProps = @{
        AVVideoAverageBitRateKey : @(2048 * 1024.0),
    };
    NSDictionary *videoSettings = @{
        AVVideoCodecKey : AVVideoCodecTypeH264,
        AVVideoWidthKey : @((int)(screenRect.size.width * 4)),
        AVVideoHeightKey : @((int)(screenRect.size.height * 4)),
        AVVideoCompressionPropertiesKey : videoCompressionProps,
    };
    _writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                      outputSettings:videoSettings];
    // Live capture: the input must not wait for data to be buffered.
    _writerInput.expectsMediaDataInRealTime = YES;
    NSParameterAssert(_writerInput);
    NSParameterAssert([_videoWriter canAddInput:_writerInput]);
    [_videoWriter addInput:_writerInput];

    // -- Audio input (mono AAC, 44.1 kHz, 64 kbps) ----------------------------
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *audioOutputSettings = @{
        AVFormatIDKey : @(kAudioFormatMPEG4AAC),
        AVNumberOfChannelsKey : @1,
        AVSampleRateKey : @44100.0f,
        AVChannelLayoutKey : [NSData dataWithBytes:&acl length:sizeof(AudioChannelLayout)],
        AVEncoderBitRateKey : @64000,
    };
    _audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                           outputSettings:audioOutputSettings];
    _audioWriterInput.expectsMediaDataInRealTime = YES;
    NSParameterAssert(_audioWriterInput);
    NSParameterAssert([_videoWriter canAddInput:_audioWriterInput]);
    [_videoWriter addInput:_audioWriterInput];

    // Fragment interval must be configured before -startWriting.
    [_videoWriter setMovieFragmentInterval:CMTimeMake(1, 600)];

    // The original code ignored this return value; if startWriting fails,
    // every subsequent append fails and recording stops almost immediately —
    // surface the writer's error instead of failing silently.
    if (![_videoWriter startWriting]) {
        NSLog(@"startWriting failed: %@", _videoWriter.error);
    }
} @catch (NSException *exception) {
    // Previously swallowed silently; log so there is at least one trace of a
    // setup failure from inside the extension.
    NSLog(@"initAssetWriter exception: %@", exception);
} @finally {
}
/// ReplayKit broadcast callback: routes each incoming sample buffer to the
/// matching asset-writer input. App-audio buffers are intentionally dropped;
/// only screen video and microphone audio are written.
/// @param sampleBuffer The captured media sample.
/// @param sampleBufferType Whether the sample is video, app audio, or mic audio.
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
    @try {
        // The writer session must be anchored to the timestamp of the very
        // first buffer received, otherwise every append is rejected.
        if (!_isRecordingStarted) {
            [_videoWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
            _isRecordingStarted = YES;
            [self saveFlurryLogs:@"Assest writer Start Recording" Details:@""];
            NSLog(@"CMSampleBufferGetPresentationTimeStamp");
        }
    } @catch (NSException *exception) {
        [self saveFlurryLogs:@"Recording Start Execption" Details:exception.description];
    } @finally {
    }
    @try {
        // Once the writer has transitioned to Failed, further appends throw.
        // Check status first and report the underlying error — this is the
        // diagnostic the original code never captured (the likely explanation
        // for the silent 2-second stop on iOS 17).
        if (_videoWriter.status == AVAssetWriterStatusFailed) {
            [self saveFlurryLogs:@"Packet Write Execption" Details:_videoWriter.error.description];
            return;
        }
        switch (sampleBufferType) {
            case RPSampleBufferTypeVideo:
                if ([_writerInput isReadyForMoreMediaData]) {
                    // appendSampleBuffer: returns NO on failure; the cause is
                    // in the writer's `error` property, not an exception.
                    if (![_writerInput appendSampleBuffer:sampleBuffer]) {
                        [self saveFlurryLogs:@"Packet Write Execption" Details:_videoWriter.error.description];
                    } else {
                        NSLog(@"writing matadata Video");
                    }
                }
                break;
            case RPSampleBufferTypeAudioApp:
                // App audio is deliberately not recorded.
                break;
            case RPSampleBufferTypeAudioMic:
                if ([_audioWriterInput isReadyForMoreMediaData]) {
                    if (![_audioWriterInput appendSampleBuffer:sampleBuffer]) {
                        [self saveFlurryLogs:@"Packet Write Execption" Details:_videoWriter.error.description];
                    } else {
                        NSLog(@"writing matadata Audio");
                    }
                }
                break;
            default:
                break;
        }
    } @catch (NSException *exception) {
        [self saveFlurryLogs:@"Packet Write Execption" Details:exception.description];
    } @finally {
    }
}
答: 暂无答案
评论