Last active
November 25, 2025 02:48
-
-
Save g-l-i-t-c-h-o-r-s-e/537353cf1e39315defef9c32b23a0c40 to your computer and use it in GitHub Desktop.
Export GL Scene as Video with FFmpeg in Quartz Composer
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env bash
# Build script for the FFExportScene Quartz Composer plug-in: compiles the
# Objective-C source against a macOS SDK, embeds the MacPorts FFmpeg dylibs
# into the bundle, rewrites install names, ad-hoc signs, and installs into
# ~/Library/Graphics/Quartz Composer Plug-Ins.
set -euo pipefail
shopt -s nullglob   # unmatched globs expand to nothing (used by *.dylib scans)
# --- config (SCENE CONSUMER PLUGIN) ---
NAME="FFExportScene"
CLASS="FFExportScenePlugIn"
SRC="${SRC:-${CLASS}.m}"                  # overridable via environment
PLUG="$NAME.plugin"
OUT="$(pwd)/build-manual-scene"           # scratch build dir (wiped below)
INST="$HOME/Library/Graphics/Quartz Composer Plug-Ins"
XCODE_APP="${XCODE_APP:-/Applications/Xcode_9.4.1.app}"
DEV="$XCODE_APP/Contents/Developer"
SDKDIR="$DEV/Platforms/MacOSX.platform/Developer/SDKs"
SDK="${SDK:-}"
# SDK selection: prefer 10.14, then 10.13, else whatever xcrun reports.
if [[ -z "${SDK}" ]]; then
  if [[ -d "$SDKDIR/MacOSX10.14.sdk" ]]; then SDK="$SDKDIR/MacOSX10.14.sdk"
  elif [[ -d "$SDKDIR/MacOSX10.13.sdk" ]]; then SDK="$SDKDIR/MacOSX10.13.sdk"
  else SDK="$(xcrun --sdk macosx --show-sdk-path 2>/dev/null || true)"
  fi
fi
# Preflight: toolchain, source file and SDK must all exist before we start.
[[ -d "$DEV" ]] || { echo "Xcode not found: $XCODE_APP"; exit 1; }
[[ -f "$SRC" ]] || { echo "Source not found: $SRC"; exit 1; }
[[ -n "$SDK" && -d "$SDK" ]] || { echo "macOS SDK not found."; exit 1; }
export DEVELOPER_DIR="$DEV"
# --- FFmpeg via MacPorts pkg-config ---
PKGCFG="/opt/local/bin/pkg-config"
[[ -x "$PKGCFG" ]] || { echo "pkg-config not found at $PKGCFG (install via MacPorts)"; exit 1; }
PKG_LIBS=(libavformat libavcodec libavutil libswscale)
CFLAGS_FFMPEG="$("$PKGCFG" --cflags "${PKG_LIBS[@]}")"
LIBS_FFMPEG="$("$PKGCFG" --libs "${PKG_LIBS[@]}")"
echo "Using SDK: $SDK"
# Fresh build tree: per-arch output plus the final bundle layout.
rm -rf "$OUT"
mkdir -p "$OUT/x86_64" "$OUT/universal/$PLUG/Contents/MacOS" "$OUT/universal/$PLUG/Contents/Frameworks"
FRAMEWORKS="$OUT/universal/$PLUG/Contents/Frameworks"
# Remove any previously installed copy so QC loads the fresh build.
if [[ -d "$INST/$PLUG" ]]; then
  echo "Removing installed $INST/$PLUG"
  rm -rf "$INST/$PLUG"
fi
# Compiler/linker flags shared by every architecture slice.
COMMON_CFLAGS=(
  -bundle -fobjc-arc -fobjc-link-runtime
  -isysroot "$SDK"
  -mmacosx-version-min=10.9
  -I .
  -I /opt/local/include
)
COMMON_LIBS=(
  -framework Foundation
  -framework Quartz
  -framework OpenGL
  -framework CoreGraphics
)
echo "Compiling x86_64 (FFmpeg scene export)…"
# NOTE: CFLAGS_FFMPEG/LIBS_FFMPEG are intentionally unquoted so the
# pkg-config output word-splits into separate compiler arguments.
clang -arch x86_64 \
  "${COMMON_CFLAGS[@]}" \
  $CFLAGS_FFMPEG \
  "$SRC" \
  "${COMMON_LIBS[@]}" \
  $LIBS_FFMPEG \
  -o "$OUT/x86_64/$NAME"
# Layout bundle
cp -a "$OUT/x86_64/$NAME" "$OUT/universal/$PLUG/Contents/MacOS/$NAME"
# Info.plist (NOTE: separate identifier just for the scene plug-in)
cat >"$OUT/universal/$PLUG/Contents/Info.plist" <<PLIST
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0"><dict>
<key>CFBundleDevelopmentRegion</key> <string>English</string>
<key>CFBundleExecutable</key> <string>${NAME}</string>
<key>CFBundleIdentifier</key> <string>com.yourdomain.${NAME}</string>
<key>CFBundleInfoDictionaryVersion</key> <string>6.0</string>
<key>CFBundleName</key> <string>${NAME}</string>
<key>CFBundlePackageType</key> <string>BNDL</string>
<key>CFBundleShortVersionString</key> <string>1.0</string>
<key>CFBundleSupportedPlatforms</key> <array><string>MacOSX</string></array>
<key>CFBundleVersion</key> <string>1</string>
<key>QCPlugInClasses</key>
<array>
<string>${CLASS}</string>
</array>
<key>NSPrincipalClass</key> <string>QCPlugIn</string>
</dict></plist>
PLIST
| # --- helpers for embedding FFmpeg dylibs from /opt/local/lib --- | |
# Given "libfoo.58.12.100.dylib", also provide "libfoo.58.dylib" so install
# names that reference the major-version form resolve inside Frameworks.
mk_short_symlink_if_needed() {
  local base="$1"
  if [[ ! "$base" =~ ^(lib[^.]+)\.([0-9]+)\.[0-9.]+\.dylib$ ]]; then
    return 0  # not a versioned dylib name; nothing to do
  fi
  local short="${BASH_REMATCH[1]}.${BASH_REMATCH[2]}.dylib"
  if [[ ! -e "$FRAMEWORKS/$short" ]]; then
    ( cd "$FRAMEWORKS" && ln -s "$base" "$short" )
  fi
}
# Print every /opt/local/lib dependency recorded in the Mach-O load commands.
list_opt_local_deps() {
  local file="$1"
  otool -L "$file" | awk '{ if ($1 ~ "^/opt/local/lib/") print $1 }'
}
# Copy one /opt/local dylib into Contents/Frameworks (at most once), point its
# install-name id at @loader_path, then recurse into its own /opt/local
# dependencies and rewrite those references too.
copy_and_rewrite() {
  local src="$1"; [[ "$src" == /opt/local/lib/* ]] || return 0
  local base dest; base="$(basename "$src")"; dest="$FRAMEWORKS/$base"
  if [[ ! -f "$dest" ]]; then
    echo " → Copy $base"
    # -L resolves symlinks so the real file (not a link) is embedded.
    rsync -aL "$src" "$dest"
    # MacPorts files may be read-only; install_name_tool needs write access.
    chmod u+w "$dest"
    install_name_tool -id "@loader_path/$base" "$dest"
    mk_short_symlink_if_needed "$base"
    # Recurse first so nested deps exist before we rewrite references to them.
    while IFS= read -r dep; do
      local depbase; depbase="$(basename "$dep")"
      copy_and_rewrite "$dep"
      install_name_tool -change "$dep" "@loader_path/$depbase" "$dest"
    done < <(list_opt_local_deps "$dest")
  fi
}
# Seed the embed set with the FFmpeg libraries the plug-in binary links to.
seed_from_otool() {
  local bin="$1" libpath
  while IFS= read -r libpath; do
    copy_and_rewrite "$libpath"
  done < <(
    otool -L "$bin" |
      awk '$1 ~ /^\/opt\/local\/lib\/lib(avformat|avcodec|avutil|swscale).*\.dylib$/ {print $1}'
  )
}
# Fallback seeding: locate each FFmpeg library via pkg-config's libdir when
# otool found no /opt/local references on the freshly built binary.
seed_from_pkgconfig() {
  local pc libdir cand
  for pc in "${PKG_LIBS[@]}"; do
    libdir="$("$PKGCFG" --variable=libdir "$pc" 2>/dev/null || echo /opt/local/lib)"
    # Prefer a versioned dylib; fall back to the unversioned name.
    for cand in "$libdir/$pc".*.dylib "$libdir/$pc.dylib"; do
      if [[ -f "$cand" ]]; then
        copy_and_rewrite "$cand"
        break
      fi
    done
  done
}
# Re-scan every embedded dylib and rewrite any remaining /opt/local
# references (copying newly discovered dependencies as needed).
final_full_sweep() {
  local lib dep leaf
  for lib in "$FRAMEWORKS"/*.dylib; do
    while IFS= read -r dep; do
      leaf="$(basename "$dep")"
      copy_and_rewrite "$dep"
      install_name_tool -change "$dep" "@loader_path/$leaf" "$lib"
    done < <(list_opt_local_deps "$lib")
  done
}
echo "Embedding FFmpeg dylibs…"
BIN="$OUT/universal/$PLUG/Contents/MacOS/$NAME"
seed_from_otool "$BIN"
# If the binary carried no /opt/local references (e.g. already rewritten),
# fall back to pkg-config discovery so Frameworks is never empty.
if ! compgen -G "$FRAMEWORKS/*.dylib" >/dev/null; then
  seed_from_pkgconfig
fi
# Rewrite the main binary's /opt/local references to the embedded copies.
while IFS= read -r dep; do
  base="$(basename "$dep")"
  if [[ ! -e "$FRAMEWORKS/$base" ]]; then
    # The binary may reference the short (major-version) name while we
    # embedded the fully-versioned file; synthesize the short symlink.
    stem="${base%.dylib}"
    stem="${stem%.*}"
    match=( "$FRAMEWORKS/$stem".*.dylib )
    # BUGFIX: with nullglob an empty match left ${match[0]} unset, which
    # aborts the script under `set -u` ("unbound variable"). Check the
    # array length before indexing.
    if (( ${#match[@]} > 0 )) && [[ -e "${match[0]}" ]]; then
      mk_short_symlink_if_needed "$(basename "${match[0]}")"
    else
      copy_and_rewrite "$dep"
    fi
  fi
  install_name_tool -change "$dep" "@loader_path/../Frameworks/$base" "$BIN"
done < <(list_opt_local_deps "$BIN")
final_full_sweep
echo "Codesigning bundled libs…"
# Ad-hoc sign (-s -) each embedded dylib, then the bundle; failures are
# tolerated (|| true) since locally loaded ad-hoc bundles still work.
for lib in "$FRAMEWORKS"/*.dylib; do
  codesign --force -s - "$lib" >/dev/null || true
done
codesign --force -s - "$OUT/universal/$PLUG" >/dev/null || true
echo "Installing to: $INST"
mkdir -p "$INST"
rsync -a "$OUT/universal/$PLUG" "$INST/"
echo "Verifying install…"
IBIN="$INST/$PLUG/Contents/MacOS/$NAME"
# Post-install audit: fail hard if the installed binary or any embedded
# dylib still links against /opt/local/lib (i.e. the rewrite missed one).
leaks=0
if otool -L "$IBIN" | awk '$1 ~ /^\/opt\/local\/lib\//' | grep -q .; then
  echo "❌ main binary still references /opt/local/lib:"
  otool -L "$IBIN" | awk '$1 ~ /^\/opt\/local\/lib\// {print " " $1}'
  leaks=1
fi
for lib in "$INST/$PLUG/Contents/Frameworks/"*.dylib; do
  if otool -L "$lib" | awk '$1 ~ /^\/opt\/local\/lib\//' | grep -q .; then
    echo "❌ $(basename "$lib") still references /opt/local/lib:"
    otool -L "$lib" | awk '$1 ~ /^\/opt\/local\/lib\// {print " " $1}'
    leaks=1
  fi
done
if [[ $leaks -ne 0 ]]; then
  echo "Fixup failed; see above offending paths."
  exit 1
fi
echo "Installed: $INST/$PLUG"
echo "Embedded libs:"
ls -1 "$INST/$PLUG/Contents/Frameworks" || true
echo "Relaunch Quartz Composer and look for 'FFExport Scene (x86_64)'."
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| // FFExportScenePlugIn.m — FFmpeg OpenGL scene exporter (CONSUMER) for Quartz Composer (Mojave/ARC, 64-bit) | |
| // Place this patch in the top layer; it captures the rendered OpenGL scene below it. | |
| // | |
| // Inputs: | |
| // Output Path (string) | |
| // Record (bool toggle; start/stop & finalize) | |
| // Pause (bool toggle; pause encoding, keep file open) | |
| // Duration (sec) (number; 0 = unlimited; based on encoded frames / FPS) | |
| // FPS (number; default 30) | |
| // Limit to FPS (bool; when ON, capture at most FPS frames/sec; PTS still monotonic frame counter) | |
| // Codec Options (string; e.g. "-c:v libx264 -g 120 -bf 3 -s 1280x720 -preset veryfast -crf 18 -pix_fmt yuv444p") | |
| // | |
| // Notes: | |
| // • For a pure, no-conversion path, you can use something like: | |
| // -c:v ffv1 -pix_fmt bgra -level 3 -coder 1 | |
| // (we bypass swscale if encoder pix_fmt == BGRA and size is unchanged) | |
| // • CPU-heavy codecs are encoded on a serial GCD queue; QC thread never blocks. | |
| // If the encoder can’t keep up, frames are dropped only after a sizable backlog, | |
| // instead of immediately, so 60 fps exports stay much smoother. | |
| // | |
| // Link with: | |
| // -framework Foundation -framework Quartz -framework OpenGL -framework CoreGraphics | |
| // FFmpeg 4.4.x: avformat,avcodec,avutil,swscale | |
| #import <Quartz/Quartz.h> | |
| #import <CoreGraphics/CoreGraphics.h> | |
| #import <OpenGL/OpenGL.h> | |
| #import <OpenGL/gl.h> | |
| #import <OpenGL/CGLMacro.h> | |
| #include <math.h> | |
| #include <string.h> | |
| #include <stdatomic.h> | |
| #ifdef __cplusplus | |
| extern "C" { | |
| #endif | |
| #include <libavformat/avformat.h> | |
| #include <libavcodec/avcodec.h> | |
| #include <libavutil/avutil.h> | |
| #include <libavutil/opt.h> | |
| #include <libavutil/imgutils.h> | |
| #include <libavutil/dict.h> | |
| #include <libavutil/pixdesc.h> | |
| #include <libswscale/swscale.h> | |
| #ifdef __cplusplus | |
| } | |
| #endif | |
// Clamp v to the closed interval [lo, hi]. (Name predates this review;
// leading-underscore file-scope identifiers are technically reserved in C.)
static inline double _clamp(double v, double lo, double hi)
{
    if (v < lo) return lo;
    if (v > hi) return hi;
    return v;
}
// Input ports. QC injects values through these @dynamic properties;
// `assign` (not retain/strong) is the QCPlugIn convention — the QC runtime
// owns the port values for the duration of -execute:.
@interface FFExportScenePlugIn : QCPlugIn
@property(assign) NSString *inputOutputPath;   // destination movie path (may be a file:// URL)
@property(assign) BOOL inputRecord;            // toggle: start/stop & finalize
@property(assign) BOOL inputPause;             // toggle: pause encoding, keep file open
@property(assign) double inputDuration;        // seconds; 0 = unlimited
@property(assign) double inputFPS;             // target frame rate (default 30)
@property(assign) BOOL inputLimitFPS;          // cap capture rate at FPS
@property(assign) NSString *inputCodecOptions; // ffmpeg-style option string, e.g. "-c:v libx264 -crf 18"
@end
@implementation FFExportScenePlugIn
{
    // FFmpeg state — owned by this object, touched on the encode queue
    // after recording starts.
    AVFormatContext *_fmt;
    AVStream *_vstream;
    AVCodecContext *_venc;
    struct SwsContext *_sws;     // NULL when the direct-BGRA path is active
    AVFrame *_frame;             // reusable encoder input frame
    int _width;   // encoded width
    int _height;  // encoded height
    int _srcWidth;   // source (scene) width
    int _srcHeight;  // source (scene) height
    AVRational _timeBase;
    double _fps;
    int64_t _nextPTS;     // strictly monotonic frame counter (always used)
    int64_t _frameCount;  // encoded frames (encoder thread)
    // Recording state (QC render thread)
    BOOL _isRecording;
    BOOL _prevRecord;     // previous Record value, for edge detection
    NSTimeInterval _recordStartTime;
    NSTimeInterval _lastTime;
    double _timeAccum;      // accumulated QC time for FPS stepping (no-limit mode)
    double _durationLimit;  // seconds (0 = unlimited)
    // Capture buffers (BGRA top-down), grown lazily to viewport size
    uint8_t *_captureBuf;
    size_t _captureBufSize;
    uint8_t *_rowTmp;       // one-row scratch for the vertical flip
    size_t _rowTmpSize;
    CGColorSpaceRef _cs;
    // Async encoding
    dispatch_queue_t _encodeQueue;  // serial; all FFmpeg calls after start
    int64_t _scheduledFrames;       // frames we’ve queued for encoding
    _Atomic int _inFlightFrames;    // backlog of frames currently in the encode queue
    BOOL _directBGRAPath;           // encoder pix_fmt == BGRA && no scale => bypass swscale
    BOOL _finalizing;               // true while trailing/cleanup is running on encodeQueue
    // FPS limiting schedule
    double _nextCaptureTime;  // next QC time at which we are allowed to capture when Limit to FPS is ON
}
@dynamic inputOutputPath, inputRecord, inputPause, inputDuration, inputFPS, inputLimitFPS, inputCodecOptions;
// Name and description shown in Quartz Composer's patch library.
+ (NSDictionary *)attributes
{
    NSMutableDictionary *attrs = [NSMutableDictionary dictionaryWithCapacity:2];
    attrs[QCPlugInAttributeNameKey] = @"FFExport Scene (x86_64)";
    attrs[QCPlugInAttributeDescriptionKey] =
        @"FFmpeg-based exporter that captures the OpenGL scene below it.\nPlace this as the top layer. Uses Record/Pause/Duration/FPS/Limit/Codec Options like FFExport.";
    return attrs;
}
// Table-driven port metadata: display name, type and default for each input.
// Returns nil for unknown keys, exactly like the original if-chain.
+ (NSDictionary *)attributesForPropertyPortWithKey:(NSString *)key
{
    if (!key) return nil;
    static NSDictionary *table = nil;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        table = @{
            @"inputOutputPath":   @{ QCPortAttributeNameKey: @"Output Path",    QCPortAttributeTypeKey: QCPortTypeString,  QCPortAttributeDefaultValueKey: @"" },
            @"inputRecord":       @{ QCPortAttributeNameKey: @"Record",         QCPortAttributeTypeKey: QCPortTypeBoolean, QCPortAttributeDefaultValueKey: @0.0 },
            @"inputPause":        @{ QCPortAttributeNameKey: @"Pause",          QCPortAttributeTypeKey: QCPortTypeBoolean, QCPortAttributeDefaultValueKey: @0.0 },
            @"inputDuration":     @{ QCPortAttributeNameKey: @"Duration (sec)", QCPortAttributeTypeKey: QCPortTypeNumber,  QCPortAttributeDefaultValueKey: @0.0 },
            @"inputFPS":          @{ QCPortAttributeNameKey: @"FPS",            QCPortAttributeTypeKey: QCPortTypeNumber,  QCPortAttributeDefaultValueKey: @30.0 },
            @"inputLimitFPS":     @{ QCPortAttributeNameKey: @"Limit to FPS",   QCPortAttributeTypeKey: QCPortTypeBoolean, QCPortAttributeDefaultValueKey: @1.0 },
            @"inputCodecOptions": @{ QCPortAttributeNameKey: @"Codec Options",  QCPortAttributeTypeKey: QCPortTypeString,  QCPortAttributeDefaultValueKey: @"" },
        };
    });
    return table[key];
}
// Port display order in the QC inspector (top to bottom).
+ (NSArray *)sortedPropertyPortKeys
{
    return @[ @"inputOutputPath", @"inputRecord", @"inputPause",
              @"inputDuration", @"inputFPS", @"inputLimitFPS",
              @"inputCodecOptions" ];
}
// Consumer patch: QC calls -execute: so this plug-in can read the framebuffer.
+ (QCPlugInExecutionMode)executionMode { return kQCPlugInExecutionModeConsumer; }
// NOTE(review): Idle time mode — confirm this yields per-frame execution
// without requiring a time input, as the capture logic assumes.
+ (QCPlugInTimeMode) timeMode { return kQCPlugInTimeModeIdle; }
+ (BOOL)allowsSubpatches { return NO; }
| // -------------------------------------------------- | |
| // Lifecycle | |
| // -------------------------------------------------- | |
// Designated initializer: zero all FFmpeg/recording state and create the
// serial encode queue that keeps FFmpeg work off the QC render thread.
- (id)init
{
    self = [super init];
    if (!self) return nil;
    // Capture color space: prefer sRGB when the SDK exposes it.
#ifdef kCGColorSpaceSRGB
    _cs = CGColorSpaceCreateWithName(kCGColorSpaceSRGB);
#else
    _cs = CGColorSpaceCreateDeviceRGB();
#endif
    // FFmpeg handles start empty; they are created per recording.
    _fmt = NULL; _vstream = NULL; _venc = NULL; _sws = NULL; _frame = NULL;
    _width = _height = 0;
    _srcWidth = _srcHeight = 0;
    // Default timing until a recording overrides it.
    _fps = 30.0;
    _timeBase = (AVRational){1,30};
    _nextPTS = 0;
    _frameCount = 0;
    // Recording flags and clocks.
    _isRecording = NO; _prevRecord = NO;
    _recordStartTime = 0.0; _lastTime = 0.0;
    _timeAccum = 0.0; _durationLimit = 0.0;
    // Capture scratch buffers are grown lazily on first capture.
    _captureBuf = NULL; _captureBufSize = 0;
    _rowTmp = NULL; _rowTmpSize = 0;
    // Serial queue: all FFmpeg encode/finalize work happens here.
    _encodeQueue = dispatch_queue_create("com.yourdomain.FFExportScene.encode", DISPATCH_QUEUE_SERIAL);
    _scheduledFrames = 0;
    atomic_store(&_inFlightFrames, 0);
    _directBGRAPath = NO;
    _finalizing = NO;
    _nextCaptureTime = 0.0;
    return self;
}
// Free capture buffers and the color space; kick off encoder finalization.
// NOTE(review): _stopEncoding dispatches a block that captures self; doing
// that from dealloc is suspect under ARC (the block retains a deallocating
// object) — confirm stopExecution: normally runs first so this is a no-op.
- (void)dealloc
{
    [self _stopEncoding]; // async finalize/cleanup
    if (_cs) { CFRelease(_cs); _cs = NULL; }
    if (_captureBuf) { free(_captureBuf); _captureBuf = NULL; _captureBufSize = 0; }
    if (_rowTmp) { free(_rowTmp); _rowTmp = NULL; _rowTmpSize = 0; }
}
// Called when the composition starts; quiet FFmpeg logging to errors only.
- (BOOL)startExecution:(id<QCPlugInContext>)context
{
    av_log_set_level(AV_LOG_ERROR);
    return YES;
}
// Called when the composition stops: finalize any in-progress recording
// (asynchronously, on the encode queue).
- (void)stopExecution:(id<QCPlugInContext>)context
{
    [self _stopEncoding];
}
| // -------------------------------------------------- | |
| // FFmpeg helpers | |
| // -------------------------------------------------- | |
// Release every FFmpeg object and reset pointers so a new recording can
// start cleanly. Safe to call repeatedly; each branch checks for NULL.
// Does NOT write the trailer — callers flush/trail first when needed.
- (void)_cleanupFFmpeg
{
    if (_venc) {
        avcodec_free_context(&_venc);
        _venc = NULL;  // avcodec_free_context already NULLs it; kept for clarity
    }
    if (_fmt) {
        // Close the output file only for formats that own a file handle.
        if (!(_fmt->oformat->flags & AVFMT_NOFILE) && _fmt->pb) {
            avio_closep(&_fmt->pb);
        }
        avformat_free_context(_fmt);
        _fmt = NULL;
    }
    if (_sws) {
        sws_freeContext(_sws);
        _sws = NULL;
    }
    if (_frame) {
        av_frame_free(&_frame);
        _frame = NULL;
    }
    // _vstream is owned by _fmt; just drop the reference.
    _vstream = NULL;
    _directBGRAPath = NO;
}
// Parse an ffmpeg "-s WxH" value (e.g. "1280x720") into *encW/*encH.
// Outputs are left untouched on any parse failure.
static void _parse_resolution(NSString *val, int *encW, int *encH)
{
    if (!val || [val length] == 0) return;
    NSArray *parts = [val componentsSeparatedByString:@"x"];
    if (parts.count != 2) return;
    int w = [parts[0] intValue];
    int h = [parts[1] intValue];
    if (w <= 0 || h <= 0) return;
    *encW = w;
    *encH = h;
}
| // Parse codec options string into: | |
| // - codecName (c:v / codec:v) | |
| // - gop size (g) | |
| // - max B-frames (bf) | |
| // - encode size (s) | |
| // - pixel format (pix_fmt / pixel_format) | |
| // - generic AVDictionary options (preset, tune, crf, etc.) | |
// Tokenize an ffmpeg-CLI-style option string and split it into the few
// options the encoder setup handles specially (codec, gop, bf, size,
// pix_fmt) plus a generic AVDictionary for everything else. Out-params may
// be NULL. Caller owns *outDict and must av_dict_free it.
- (void)_parseCodecOptionsString:(NSString *)opts
                       codecName:(NSString * __strong *)outCodecName
                          gopPtr:(int *)outGop
                           bfPtr:(int *)outBF
                        encWidth:(int *)outEncW
                       encHeight:(int *)outEncH
                          pixFmt:(enum AVPixelFormat *)outPixFmt
                    codecOptions:(AVDictionary **)outDict
{
    // Reset outputs to "unspecified" sentinels before parsing.
    if (outCodecName) *outCodecName = nil;
    if (outGop) *outGop = -1;
    if (outBF) *outBF = -1;
    if (outPixFmt) *outPixFmt = AV_PIX_FMT_NONE;
    AVDictionary *d = NULL;
    if (!opts || (id)opts == [NSNull null] || [opts length] == 0) {
        if (outDict) *outDict = NULL;
        return;
    }
    // Split on whitespace/newlines and drop empty tokens.
    NSCharacterSet *ws = [NSCharacterSet whitespaceAndNewlineCharacterSet];
    NSArray<NSString*> *tokens = [opts componentsSeparatedByCharactersInSet:ws];
    NSMutableArray<NSString*> *clean = [NSMutableArray arrayWithCapacity:[tokens count]];
    for (NSString *t in tokens) {
        if ([t length] > 0) [clean addObject:t];
    }
    for (NSUInteger i = 0; i < [clean count]; ++i) {
        NSString *tok = clean[i];
        if (![tok hasPrefix:@"-"]) continue;  // values are consumed with their key
        NSString *key = [tok substringFromIndex:1];
        NSString *val = (i + 1 < [clean count]) ? clean[i+1] : nil;
        // Strip a ":v" stream specifier so e.g. "-g:v" matches the plain key.
        NSString *plainKey = key;
        if ([plainKey hasSuffix:@":v"]) {
            plainKey = [plainKey substringToIndex:plainKey.length - 2];
        }
        // Codec name (-c:v / -codec:v)
        if (([key isEqualToString:@"c:v"] || [key isEqualToString:@"codec:v"]) && val) {
            if (outCodecName) *outCodecName = val;
            i++;
            continue;
        }
        // GOP size (-g)
        if ([plainKey isEqualToString:@"g"] && val && outGop) {
            *outGop = [val intValue];
            i++;
            continue;
        }
        // Max B-frames (-bf)
        if ([plainKey isEqualToString:@"bf"] && val && outBF) {
            *outBF = [val intValue];
            i++;
            continue;
        }
        // Encode size (-s WxH)
        if ([plainKey isEqualToString:@"s"] && val && outEncW && outEncH) {
            _parse_resolution(val, outEncW, outEncH);
            i++;
            continue;
        }
        // Pixel format (-pix_fmt / -pixel_format)
        if (([plainKey isEqualToString:@"pix_fmt"] || [plainKey isEqualToString:@"pixel_format"]) && val && outPixFmt) {
            enum AVPixelFormat pf = av_get_pix_fmt([val UTF8String]);
            if (pf != AV_PIX_FMT_NONE) {
                *outPixFmt = pf;
            }
            i++;
            continue; // handled, don't add to dict
        }
        // Everything else goes into AVDictionary
        // NOTE(review): a value-less flag (e.g. "-an") would swallow the next
        // "-option" token as its value here — confirm inputs always come as
        // key/value pairs.
        if (val) {
            av_dict_set(&d, [plainKey UTF8String], [val UTF8String], 0);
            i++;
        }
    }
    if (outDict) *outDict = d;
}
| // srcW/srcH = OpenGL viewport size; -s can override encode size, -pix_fmt overrides pixel format. | |
// Open the output file, create and configure the encoder, and write the
// container header. Returns YES when recording may begin. On failure the
// partially built FFmpeg state is left for the next _cleanupFFmpeg (called
// at the top of this method and from _stopEncoding).
- (BOOL)_startEncodingWithSourceWidth:(int)srcW
                         sourceHeight:(int)srcH
                                  fps:(double)fps
                                 path:(NSString *)path
                              options:(NSString *)optString
{
    if (_finalizing) {
        NSLog(@"[FFExportScene] Still finalizing previous recording; ignoring new start.");
        return NO;
    }
    [self _cleanupFFmpeg];
    if (srcW <= 0 || srcH <= 0) return NO;
    if (fps <= 0.0) fps = 30.0;
    _srcWidth = srcW;
    _srcHeight = srcH;
    _fps = fps;
    // Encode size defaults to source size; "-s" may override below.
    int encW = srcW;
    int encH = srcH;
    NSString *codecName = nil;
    int gopSize = -1;
    int maxBF = -1;
    AVDictionary *codecOpts = NULL;
    enum AVPixelFormat pixFmt = AV_PIX_FMT_NONE;
    [self _parseCodecOptionsString:optString
                         codecName:&codecName
                            gopPtr:&gopSize
                             bfPtr:&maxBF
                          encWidth:&encW
                         encHeight:&encH
                            pixFmt:&pixFmt
                      codecOptions:&codecOpts];
    if (encW <= 0 || encH <= 0) {
        encW = srcW;
        encH = srcH;
    }
    _width = encW;
    _height = encH;
    // Integer fps drives both time_base and framerate (PTS is a frame index).
    int fpsInt = (int)llround(fps);
    if (fpsInt < 1) fpsInt = 1;
    _timeBase = (AVRational){1, fpsInt};
    _nextPTS = 0;
    _frameCount = 0;
    _scheduledFrames = 0;
    atomic_store(&_inFlightFrames, 0);
    // Accept either a plain path or a file:// URL string.
    NSString *realPath = path;
    if ([realPath hasPrefix:@"file://"]) {
        realPath = [[NSURL URLWithString:realPath] path];
    }
    const char *filename = [realPath fileSystemRepresentation];
    // Guess the container from the filename extension; fall back to mp4.
    AVOutputFormat *ofmt = NULL;
    avformat_alloc_output_context2(&_fmt, NULL, NULL, filename);
    if (!_fmt) {
        avformat_alloc_output_context2(&_fmt, NULL, "mp4", filename);
    }
    if (!_fmt) {
        if (codecOpts) av_dict_free(&codecOpts);
        return FALSE; // NOTE(review): FALSE vs NO elsewhere — same value, inconsistent style
    }
    ofmt = _fmt->oformat;
    // Encoder: named codec from "-c:v" when found, else H.264 default.
    const AVCodec *codec = NULL;
    if (codecName && [codecName length] > 0) {
        codec = avcodec_find_encoder_by_name([codecName UTF8String]);
    }
    if (!codec) {
        codec = avcodec_find_encoder(AV_CODEC_ID_H264);
    }
    if (!codec) {
        if (codecOpts) av_dict_free(&codecOpts);
        return NO;
    }
    // Default pixel format: use encoder's first supported if none specified; fallback to yuv420p.
    if (pixFmt == AV_PIX_FMT_NONE) {
        if (codec->pix_fmts) {
            pixFmt = codec->pix_fmts[0];
        } else {
            pixFmt = AV_PIX_FMT_YUV420P;
        }
    }
    _vstream = avformat_new_stream(_fmt, codec);
    if (!_vstream) {
        if (codecOpts) av_dict_free(&codecOpts);
        return NO;
    }
    _vstream->id = _fmt->nb_streams - 1;
    _venc = avcodec_alloc_context3(codec);
    if (!_venc) {
        if (codecOpts) av_dict_free(&codecOpts);
        return NO;
    }
    _venc->codec_id = codec->id;
    _venc->width = encW;
    _venc->height = encH;
    _venc->pix_fmt = pixFmt;
    _venc->time_base = _timeBase;
    _vstream->time_base = _timeBase;
    _venc->framerate = (AVRational){ fpsInt, 1 };
    _venc->gop_size = (gopSize > 0 ? gopSize : fpsInt);   // default: 1 keyframe/sec
    _venc->max_b_frames = (maxBF >= 0 ? maxBF : 2);
    _venc->bit_rate = 8 * 1000 * 1000; // default; can be overridden by options
    if (ofmt->flags & AVFMT_GLOBALHEADER) {
        _venc->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
    }
    // Color metadata (mostly relevant for YUV)
    if (_venc->pix_fmt == AV_PIX_FMT_YUV420P ||
        _venc->pix_fmt == AV_PIX_FMT_YUV422P ||
        _venc->pix_fmt == AV_PIX_FMT_YUV444P ||
        _venc->pix_fmt == AV_PIX_FMT_YUV420P10LE ||
        _venc->pix_fmt == AV_PIX_FMT_YUV444P10LE) {
        _venc->color_primaries = AVCOL_PRI_BT709;
        _venc->color_trc = AVCOL_TRC_BT709;
        _venc->colorspace = AVCOL_SPC_BT709;
        _venc->color_range = AVCOL_RANGE_MPEG; // studio
    } else {
        _venc->color_range = AVCOL_RANGE_JPEG; // full for RGB-ish
    }
    // Seed x264-style defaults only when the user didn't specify them.
    if (_venc->priv_data) {
        if (!av_dict_get(codecOpts, "preset", NULL, 0)) {
            av_dict_set(&codecOpts, "preset", "medium", 0);
        }
        if (!av_dict_get(codecOpts, "tune", NULL, 0)) {
            av_dict_set(&codecOpts, "tune", "animation", 0);
        }
    }
    // avcodec_open2 consumes recognized entries from codecOpts.
    if (avcodec_open2(_venc, codec, &codecOpts) < 0) {
        if (codecOpts) av_dict_free(&codecOpts);
        return NO;
    }
    if (codecOpts) av_dict_free(&codecOpts);
    if (avcodec_parameters_from_context(_vstream->codecpar, _venc) < 0) {
        return NO;
    }
    if (!(ofmt->flags & AVFMT_NOFILE)) {
        if (avio_open(&_fmt->pb, filename, AVIO_FLAG_WRITE) < 0) {
            return NO;
        }
    }
    if (avformat_write_header(_fmt, NULL) < 0) {
        return NO;
    }
    // Reusable encoder input frame matching the encoder's format/size.
    _frame = av_frame_alloc();
    if (!_frame) return NO;
    _frame->format = _venc->pix_fmt;
    _frame->width = _venc->width;
    _frame->height = _venc->height;
    if (av_frame_get_buffer(_frame, 32) < 0) {
        return NO;
    }
    // Decide if we can bypass swscale and copy BGRA directly
    _directBGRAPath = (_venc->pix_fmt == AV_PIX_FMT_BGRA &&
                       _srcWidth == _width &&
                       _srcHeight == _height);
    if (!_directBGRAPath) {
        _sws = sws_getContext(_srcWidth, _srcHeight, AV_PIX_FMT_BGRA,
                              encW, encH, _venc->pix_fmt,
                              SWS_BICUBIC, NULL, NULL, NULL);
        if (!_sws) return NO;
    } else {
        _sws = NULL;
    }
    NSLog(@"[FFExportScene] Recording started: %s (%dx%d -> %dx%d @ %.3f fps, pix_fmt=%d, directBGRA=%d)",
          filename, _srcWidth, _srcHeight, encW, encH, _fps, (int)_venc->pix_fmt, (int)_directBGRAPath);
    return YES;
}
| // Called only on the encode queue. | |
- (BOOL)_encodeFrameWithBGRA_locked:(const uint8_t *)src
                           rowBytes:(int)rowBytes
                                pts:(int64_t)pts
{
    // Encode one top-down BGRA frame. Runs ONLY on _encodeQueue ("_locked"),
    // so FFmpeg state is accessed serially without additional locking.
    if (!_fmt || !_venc || !_frame) return NO;
    if (av_frame_make_writable(_frame) < 0) return NO;
    if (_directBGRAPath) {
        // Pure copy: BGRA -> BGRA, same size, no swscale, no color conversion.
        uint8_t *dst = _frame->data[0];
        int dstRB = _frame->linesize[0];
        int copyRB = rowBytes;
        if (copyRB > dstRB) copyRB = dstRB;  // never overrun the AVFrame row
        for (int y = 0; y < _srcHeight; ++y) {
            memcpy(dst + (size_t)y * dstRB, src + (size_t)y * rowBytes, (size_t)copyRB);
        }
    } else {
        // Use swscale for RGB->YUV or scaling conversions.
        const uint8_t *srcSlice[4] = { src, NULL, NULL, NULL };
        int srcStride[4] = { rowBytes, 0, 0, 0 };
        if (!_sws) return NO;
        sws_scale(_sws, srcSlice, srcStride, 0, _srcHeight,
                  _frame->data, _frame->linesize);
    }
    _frame->pts = pts;
    // Standard send/receive loop: drain every packet the encoder has ready.
    int ret = avcodec_send_frame(_venc, _frame);
    if (ret < 0) return NO;
    AVPacket *pkt = av_packet_alloc();
    if (!pkt) return NO;
    for (;;) {
        ret = avcodec_receive_packet(_venc, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
            break;  // encoder needs more input / is done — not an error
        } else if (ret < 0) {
            av_packet_free(&pkt);
            return NO;
        }
        pkt->stream_index = _vstream->index;
        // Packets carry encoder time_base stamps; remux into stream units.
        av_packet_rescale_ts(pkt, _venc->time_base, _vstream->time_base);
        ret = av_interleaved_write_frame(_fmt, pkt);
        av_packet_unref(pkt);
        if (ret < 0) {
            av_packet_free(&pkt);
            return NO;
        }
    }
    av_packet_free(&pkt);
    _frameCount++;
    return YES;
}
| // Called on encode queue | |
// Drain the encoder's delayed frames (B-frames, lookahead) before the
// trailer is written. Runs only on the encode queue; errors are ignored
// because this is best-effort finalization.
- (void)_flushEncoder_locked
{
    if (!_fmt || !_venc) return;
    // NULL frame enters flush mode.
    int ret = avcodec_send_frame(_venc, NULL);
    if (ret < 0) {
        return;
    }
    AVPacket *pkt = av_packet_alloc();
    if (!pkt) return;
    for (;;) {
        ret = avcodec_receive_packet(_venc, pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) break;
        if (ret < 0) break;
        pkt->stream_index = _vstream->index;
        av_packet_rescale_ts(pkt, _venc->time_base, _vstream->time_base);
        av_interleaved_write_frame(_fmt, pkt);
        av_packet_unref(pkt);
    }
    av_packet_free(&pkt);
}
| // Public stop: async — QC thread no longer blocks, so no visible jitter. | |
- (void)_stopEncoding
{
    // Nothing open: just clear the flag and return.
    if (!_fmt && !_venc) {
        _isRecording = NO;
        return;
    }
    BOOL wasRecording = _isRecording;
    _isRecording = NO;
    if (!_encodeQueue) {
        // Fallback: finalize synchronously (shouldn’t happen in practice).
        if (wasRecording && _fmt && _venc) {
            [self _flushEncoder_locked];
            av_write_trailer(_fmt);
        }
        [self _cleanupFFmpeg];
        _frameCount = 0;
        _scheduledFrames = 0;
        atomic_store(&_inFlightFrames, 0);
        return;
    }
    // Normal path: finalize on the serial encode queue so it runs AFTER all
    // pending frame blocks — no queued frame is lost.
    _finalizing = YES;
    int64_t totalFrames = _frameCount;
    // NOTE(review): _frameCount is incremented on the encode queue; reading
    // it here may under-report the tally in the log line — confirm intended.
    dispatch_async(_encodeQueue, ^{
        @autoreleasepool {
            if (wasRecording && _fmt && _venc) {
                [self _flushEncoder_locked];
                av_write_trailer(_fmt);
            }
            NSLog(@"[FFExportScene] Recording stopped. Encoded frames: %lld", (long long)totalFrames);
            [self _cleanupFFmpeg];
            _frameCount = 0;
            _scheduledFrames = 0;
            atomic_store(&_inFlightFrames, 0);
            _finalizing = NO;
        }
    });
}
| // Enqueue a frame for encoding; called from QC thread. | |
// Hand a malloc'd BGRA buffer to the encode queue. Ownership of `data`
// transfers to this method: it is freed here on early-out, or inside the
// dispatched block after encoding. _inFlightFrames tracks the backlog so
// the capture path can drop frames when the encoder falls behind.
- (void)_enqueueBGRAForEncoding:(uint8_t *)data
                       rowBytes:(int)rowBytes
                            pts:(int64_t)pts
{
    if (!_fmt || !_venc || !_encodeQueue) {
        free(data);
        return;
    }
    atomic_fetch_add(&_inFlightFrames, 1);
    dispatch_async(_encodeQueue, ^{
        @autoreleasepool {
            [self _encodeFrameWithBGRA_locked:data rowBytes:rowBytes pts:pts];
            free(data);
            atomic_fetch_sub(&_inFlightFrames, 1);
        }
    });
}
| // Capture the current OpenGL scene and schedule it for encoding. | |
| // Returns YES if we actually scheduled a frame, NO if dropped/failed. | |
// Capture the current OpenGL scene and schedule it for encoding.
// Returns YES if a frame was scheduled, NO if dropped or failed.
- (BOOL)_captureSceneAtTime:(NSTimeInterval)time
                    context:(id<QCPlugInContext>)context
                 maxBacklog:(int)maxBacklog
{
    (void)time; // PTS now comes purely from the frame counter.
    // Backpressure: when the encode queue backlog reaches maxBacklog,
    // drop this logical frame instead of stalling the QC render thread.
    int inFlight = atomic_load(&_inFlightFrames);
    if (inFlight >= maxBacklog) {
        return NO;
    }
    CGLContextObj cgl_ctx = [context CGLContextObj]; // CGLMacro routes GL calls through cgl_ctx
    (void)cgl_ctx;
    GLint viewport[4] = {0,0,0,0};
    glGetIntegerv(GL_VIEWPORT, viewport);
    int w = viewport[2];
    int h = viewport[3];
    // Only capture while the viewport matches the size recording started with.
    if (w != _srcWidth || h != _srcHeight || w <= 0 || h <= 0) {
        return NO;
    }
    size_t rowBytes = (size_t)_srcWidth * 4;           // tightly packed BGRA
    size_t needed = rowBytes * (size_t)_srcHeight;
    // BUGFIX: the original used `p = realloc(p, n)` and updated the size
    // unconditionally — on allocation failure that leaks the old buffer,
    // records a bogus size, and passes NULL to glReadPixels/memcpy.
    if (_captureBufSize < needed) {
        uint8_t *grown = (uint8_t *)realloc(_captureBuf, needed);
        if (!grown) return NO;            // old buffer and size stay valid
        _captureBuf = grown;
        _captureBufSize = needed;
    }
    if (_rowTmpSize < rowBytes) {
        uint8_t *grown = (uint8_t *)realloc(_rowTmp, rowBytes);
        if (!grown) return NO;
        _rowTmp = grown;
        _rowTmpSize = rowBytes;
    }
    // Read scene as BGRA from QC's framebuffer (rows are 4-byte aligned
    // since each BGRA row is a multiple of 4 bytes).
    glPixelStorei(GL_PACK_ALIGNMENT, 4);
    glReadPixels(viewport[0], viewport[1], w, h,
                 GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV,
                 _captureBuf);
    // Vertical flip: OpenGL is bottom-up; encoder expects top-down.
    uint8_t *top = _captureBuf;
    uint8_t *bottom = _captureBuf + (size_t)(_srcHeight - 1) * rowBytes;
    for (int y = 0; y < _srcHeight / 2; ++y) {
        memcpy(_rowTmp, top, rowBytes);
        memcpy(top, bottom, rowBytes);
        memcpy(bottom, _rowTmp, rowBytes);
        top += rowBytes;
        bottom -= rowBytes;
    }
    // Private copy for the encode queue; ownership transfers to the enqueue.
    uint8_t *copy = (uint8_t *)malloc(needed);
    if (!copy) return NO;
    memcpy(copy, _captureBuf, needed);
    // Monotonic PTS: simple frame counter (prevents non-monotonic DTS warnings)
    int64_t pts = _nextPTS++;
    _scheduledFrames++;
    [self _enqueueBGRAForEncoding:copy rowBytes:(int)rowBytes pts:pts];
    return YES;
}
// --------------------------------------------------
// Execute (capture OpenGL scene)
// --------------------------------------------------
// Per-frame QC entry point: sanitizes input port values, detects Record
// on/off edges, starts/stops the FFmpeg encoder, enforces the optional
// duration limit, and schedules scene captures either rate-limited (one
// per QC tick) or free-running via accumulated patch time. Always returns
// YES so the composition keeps executing.
// NOTE(review): statement order matters throughout — edges are computed
// before _prevRecord is overwritten, duration auto-stop runs before the
// edge-off stop, and _lastTime is advanced even while paused.
- (BOOL)execute:(id<QCPlugInContext>)context
         atTime:(NSTimeInterval)time
  withArguments:(NSDictionary *)arguments
{
    @autoreleasepool {
        // --- Read and sanitize input ports (QC may hand us NSNull) ---
        NSString *path = self.inputOutputPath;
        if (!path || (id)path == [NSNull null]) path = @"";
        double fpsVal = self.inputFPS;
        if (fpsVal <= 0.0) fpsVal = 30.0;   // default when unset/invalid
        fpsVal = _clamp(fpsVal, 1.0, 240.0);
        double durVal = self.inputDuration;
        if (durVal < 0.0) durVal = 0.0;     // 0 means "no duration limit"
        NSString *codecOpts = self.inputCodecOptions;
        if (!codecOpts || (id)codecOpts == [NSNull null]) codecOpts = @"";
        BOOL recVal = self.inputRecord;
        BOOL pauseVal = self.inputPause;
        BOOL limitFPSVal = self.inputLimitFPS;

        // Edge detection on the Record toggle; must read _prevRecord
        // before overwriting it.
        BOOL recEdgeOn = (recVal && !_prevRecord);
        BOOL recEdgeOff = (!recVal && _prevRecord);
        _prevRecord = recVal;

        // Start recording on Record rising edge
        if (recEdgeOn && !_isRecording && !_finalizing && path.length > 0) {
            CGLContextObj cgl_ctx = [context CGLContextObj];
            (void)cgl_ctx;
            // Size the encoder from the current GL viewport.
            GLint viewport[4] = {0,0,0,0};
            glGetIntegerv(GL_VIEWPORT, viewport);
            int w = viewport[2];
            int h = viewport[3];
            if (w > 0 && h > 0) {
                if ([self _startEncodingWithSourceWidth:w
                                           sourceHeight:h
                                                    fps:fpsVal
                                                   path:path
                                                options:codecOpts]) {
                    // Reset all per-recording timing state.
                    _isRecording = YES;
                    _durationLimit = durVal;
                    _recordStartTime = time;
                    _lastTime = time;
                    _timeAccum = 0.0;
                    _nextCaptureTime = time; // first capture can happen immediately in limited mode
                } else {
                    NSLog(@"[FFExportScene] Failed to start encoding for path: %@", path);
                }
            } else {
                NSLog(@"[FFExportScene] Viewport size is zero; cannot start recording.");
            }
        }

        // Duration auto-stop (based on encoded timeline = scheduledFrames / FPS)
        if (_isRecording && _durationLimit > 0.0 && _fps > 0.0 && _scheduledFrames > 0) {
            double recordedSecs = (double)_scheduledFrames / _fps;
            if (recordedSecs >= _durationLimit) {
                [self _stopEncoding];
            }
        }

        // Stop & finalize when Record is untoggled
        if (_isRecording && recEdgeOff) {
            [self _stopEncoding];
        }

        // Encode frames while recording and NOT paused
        if (_isRecording) {
            double dt = time - _lastTime;
            if (dt < 0.0) dt = 0.0; // guard against patch time jumping backwards
            _lastTime = time;       // advanced even while paused, so unpausing doesn't burst
            if (!pauseVal) {
                // Effective FPS for scheduling; use actual encoder fps if available, otherwise input fpsVal.
                double effFPS = (_fps > 0.0 ? _fps : fpsVal);
                if (effFPS <= 0.0) effFPS = 30.0;
                double frameInterval = 1.0 / effFPS;
                // Dynamic backlog: ~1.5s of frames, clamped 12..120
                int maxBacklog = (int)llround(effFPS * 1.5);
                if (maxBacklog < 12) maxBacklog = 12;
                if (maxBacklog > 120) maxBacklog = 120;
                if (limitFPSVal) {
                    // Limit capture to at most FPS, one capture per QC tick; no "catch-up" loops.
                    if (_nextCaptureTime <= 0.0) {
                        _nextCaptureTime = time;
                    }
                    if (time >= _nextCaptureTime) {
                        (void)[self _captureSceneAtTime:time
                                                context:context
                                             maxBacklog:maxBacklog];
                        _nextCaptureTime += frameInterval;
                        // If we fell far behind (e.g. massive stutter), keep us close to "now"
                        if (_nextCaptureTime < time) {
                            _nextCaptureTime = time + frameInterval;
                        }
                    }
                } else {
                    // Free-running path: attempt to maintain nominal FPS using accumulated QC time.
                    _timeAccum += dt;
                    // Cap catch-up so a single long stall can't trigger an
                    // unbounded burst of captures on one tick.
                    int maxFramesThisTick = 4;
                    int framesScheduled = 0;
                    while (_timeAccum >= frameInterval && framesScheduled < maxFramesThisTick) {
                        (void)[self _captureSceneAtTime:time
                                                context:context
                                             maxBacklog:maxBacklog];
                        _timeAccum -= frameInterval;
                        framesScheduled++;
                    }
                }
            }
        }
        return YES;
    }
}
| @end |
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.