@class AVCaptureMovieFileOutput, NSString, NSLock, AVAssetWriter, CALayer, CAAnimationGroup, NSMutableArray, NSMutableData, AVAssetWriterInput, AVPlayer;

@protocol AVTRecordViewDelegate;

@interface AVTRecordView : AVTView <AVTFaceTrackerDelegate> {
    BOOL _exportingMovie;
    BOOL _playBakedAnimation;
    BOOL _disableRendering;
    float _maxRecordingDuration;
    NSMutableData *_rawTimesData;
    NSMutableData *_rawBlendShapesData;
    NSMutableData *_rawTransformsData;
    NSMutableData *_rawParametersData;
    double _referenceAnimationBeginTime;
    double _recordingStartTime;
    int _recordedCount;
    int _recordingCapacity;
    CAAnimationGroup *_recordedAnimationGroup;
    AVCaptureMovieFileOutput *_movieFileOutput;
    AVPlayer *_audioPlayer;
    AVAssetWriterInput *_audioWriterInput;
    AVAssetWriter *_audioWriter;
    NSLock *_audioLock;
    struct { long long value; int timescale; unsigned int flags; long long epoch; } _currentAudioTime;
    struct { long long value; int timescale; unsigned int flags; long long epoch; } _startAudioTime;
    struct { long long value; int timescale; unsigned int flags; long long epoch; } _stopAudioTime;
    long long _recordedSampleCount;
    BOOL _audioIsRecording;
    double _lastAudioPlayerTime;
    double _lastAudioSystemTime;
    BOOL _playing;
    BOOL _transitioningFromSnapshot;
    double _t0;
    int _benchFrameCounter;
    BOOL _doubleBuffer;
    BOOL _checkDrawableAvailable;
    NSMutableArray *_droppedDoubleBufferFrames;
    long long _preferredFramesPerSecond_user;
    long long _preferredFramesPerSecond_thermal;
    CALayer *_backingLayer;
}

@property (weak, nonatomic) id<AVTRecordViewDelegate> recordDelegate;
@property (readonly, nonatomic, getter=isRecording) BOOL recording;
@property (readonly, nonatomic, getter=isPreviewing) BOOL previewing;
@property (nonatomic) BOOL mute;
@property (nonatomic) float maxRecordingDuration;
@property (readonly) unsigned long long hash;
@property (readonly) Class superclass;
@property (readonly, copy) NSString *description;
@property (readonly, copy) NSString *debugDescription;

+ (void)setUsesInternalTrackingPipeline:(BOOL)a0;
+ (BOOL)usesInternalTrackingPipeline;

- (void)stopRecording;
- (void)startRecording;
- (id)init;
- (void)dealloc;
- (void)_drawAtTime:(double)a0;
- (void)setPreferredFramesPerSecond:(long long)a0;
- (id)initWithFrame:(struct CGRect { struct CGPoint { double x0; double x1; } x0; struct CGSize { double x0; double x1; } x1; })a0;
- (BOOL)recording;
- (void).cxx_destruct;
- (long long)preferredFramesPerSecond;
- (id)initWithCoder:(id)a0;
- (void)setAvatar:(id)a0;
- (void)cancelRecording;
- (void)audioSessionInterruption:(id)a0;
- (void)stopPlayingAudio;
- (void)updateAtTime:(double)a0;
- (BOOL)disableRendering;
- (void)setDisableRendering:(BOOL)a0;
- (id)initWithFrame:(struct CGRect { struct CGPoint { double x0; double x1; } x0; struct CGSize { double x0; double x1; } x1; })a0 options:(id)a1;
- (void)setFaceTrackingPaused:(BOOL)a0;
- (void)_renderer:(id)a0 updateAtTime:(double)a1;
- (double)_renderer:(id)a0 inputTimeForCurrentFrameWithTime:(double)a1;
- (void)playPreviewOnce;
- (void)_avt_commonInit;
- (void)avatarDidChange;
- (void)startPreviewing;
- (void)updateMuteState;
- (void)_didLostTrackingForAWhile;
- (void)_didUpdateAtTime:(double)a0;
- (void)_playLivePreviewAnimation;
- (void)_processInfoThermalStateDidChange:(id)a0;
- (void)_setEffectivePreferredFramesPerSecond;
- (void)_smoothRecordedData;
- (id)_tmpAudioURL;
- (id)_tmpMaskVideoURL;
- (id)_tmpVideoURL;
- (void)_updateFrameRateForThermalState:(long long)a0;
- (void)_updateTrackingState;
- (void)addRecordedAnimationToAvatar:(id)a0;
- (BOOL)allowTrackSmoothing;
- (void)audioPlayerItemDidReachEnd:(id)a0;
- (void)cancelMovieExport;
- (void)cancelRecordingAudio;
- (void)convertRecordedDataToAnimationGroup;
- (struct opaqueCMSampleBuffer { } *)createSilentAudioAtFrame:(long long)a0 nFrames:(int)a1 sampleRate:(double)a2 numChannels:(int)a3;
- (double)currentAudioTime;
- (void)drawableNotAvailableForTime:(double)a0;
- (BOOL)exportMovieToURL:(id)a0 options:(id)a1 completionHandler:(id /* block */)a2;
- (BOOL)faceIsFullyActive;
- (void)faceTracker:(id)a0 session:(id)a1 didFailWithError:(id)a2;
- (void)faceTracker:(id)a0 session:(id)a1 didOutputAudioSampleBuffer:(struct opaqueCMSampleBuffer { } *)a2;
- (void)faceTracker:(id)a0 sessionInterruptionEnded:(id)a1;
- (void)faceTracker:(id)a0 sessionWasInterrupted:(id)a1;
- (void)faceTrackerDidUpdate:(id)a0 withARFrame:(id)a1;
- (id)faceTrackingRecordingURL;
- (void)fadePuppetToWhite:(float)a0;
- (double)finalVideoDuration;
- (void)finalizeAudioFile;
- (BOOL)isDoubleBuffered;
- (BOOL)mergeAudio:(id)a0 andVideoTo:(id)a1 error:(id *)a2;
- (BOOL)playBakedAnimation;
- (double)recordingDuration;
- (void)removeRecordedAnimationFromAvatar:(id)a0;
- (void)setFaceTrackingRecordingURL:(id)a0;
- (void)setPlayBakedAnimation:(BOOL)a0;
- (void)startPlayingAudio;
- (void)startRecordingAudio;
- (void)stopPreviewing;
- (void)stopRecordingAudio;
- (void)transitionToFaceTrackingWithDuration:(double)a0 style:(unsigned long long)a1 enableBakedAnimations:(BOOL)a2 completionHandler:(id /* block */)a3;
- (void)trimRecordedData;
- (void)updateAudioState;
- (void)updateForChangedFaceTrackingPaused;

@end
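// Usage sketch, not part of the dump above: AVTRecordView belongs to Apple's
// private AvatarKit framework, so the calling pattern below is inferred from
// selector names alone. The AVTRecordViewDelegate conformance, the type of the
// `avatar` argument, the meaning of `options`, and the completion-handler
// block signature are all assumptions, not documented behavior.
#if 0 // illustrative only; will not compile against public SDKs

static AVTRecordView *AVTMakeRecordView(UIView *container, id avatar, id<AVTRecordViewDelegate> delegate) {
    AVTRecordView *recordView = [[AVTRecordView alloc] initWithFrame:container.bounds];
    recordView.recordDelegate = delegate;
    recordView.maxRecordingDuration = 10.0f; // unit assumed to be seconds
    recordView.mute = NO;
    [container addSubview:recordView];

    [recordView setAvatar:avatar];           // assumed to take an AVTAvatar instance
    [recordView startPreviewing];            // live face-tracked preview before recording
    return recordView;
}

static void AVTFinishAndExport(AVTRecordView *recordView, NSURL *outputURL) {
    [recordView stopRecording];
    // class-dump exposes the handler only as `id /* block */`; a no-argument
    // block is a guess, and the real block may carry a BOOL or NSError.
    [recordView exportMovieToURL:outputURL options:nil completionHandler:^{
        // export complete
    }];
}

#endif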