diff --git a/AVPlayerExample.xcodeproj/project.pbxproj b/AVPlayerExample.xcodeproj/project.pbxproj old mode 100644 new mode 100755 index 671234c6..81a60d12 --- a/AVPlayerExample.xcodeproj/project.pbxproj +++ b/AVPlayerExample.xcodeproj/project.pbxproj @@ -3,40 +3,42 @@ archiveVersion = 1; classes = { }; - objectVersion = 46; + objectVersion = 50; objects = { /* Begin PBXBuildFile section */ - 8A0A1F191ECD1FFC0008C1AF /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 8A0A1F181ECD1FFC0008C1AF /* main.m */; }; - 8A0A1F1C1ECD1FFC0008C1AF /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 8A0A1F1B1ECD1FFC0008C1AF /* AppDelegate.m */; }; - 8A0A1F1F1ECD1FFC0008C1AF /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 8A0A1F1E1ECD1FFC0008C1AF /* ViewController.m */; }; - 8A0A1F221ECD1FFC0008C1AF /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 8A0A1F201ECD1FFC0008C1AF /* Main.storyboard */; }; - 8A0A1F241ECD1FFC0008C1AF /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A0A1F231ECD1FFC0008C1AF /* Assets.xcassets */; }; - 8A0A1F271ECD1FFC0008C1AF /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 8A0A1F251ECD1FFC0008C1AF /* LaunchScreen.storyboard */; }; - 8A85E4951ECE97F6004719C8 /* Utils.m in Sources */ = {isa = PBXBuildFile; fileRef = 8A85E4941ECE97F6004719C8 /* Utils.m */; }; - 8A85E4981ECEA82D004719C8 /* AVPlayerView.m in Sources */ = {isa = PBXBuildFile; fileRef = 8A85E4971ECEA82D004719C8 /* AVPlayerView.m */; }; + 8A34C1D52189333400F22BE9 /* ExampleAVPlayerAudioTap.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A34C1D42189333400F22BE9 /* ExampleAVPlayerAudioTap.swift */; }; + 8A34C1DA2189496A00F22BE9 /* ExampleAVPlayerAudioDevice.m in Sources */ = {isa = PBXBuildFile; fileRef = 8A34C1D92189496A00F22BE9 /* ExampleAVPlayerAudioDevice.m */; }; + 8A395E432187D2B200437980 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A395E422187D2B200437980 /* AppDelegate.swift */; }; + 8A395E452187D2B200437980 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A395E442187D2B200437980 /* ViewController.swift */; }; + 8A395E482187D2B200437980 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 8A395E462187D2B200437980 /* Main.storyboard */; }; + 8A395E4A2187D2B300437980 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 8A395E492187D2B300437980 /* Assets.xcassets */; }; + 8A395E4D2187D2B300437980 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 8A395E4B2187D2B300437980 /* LaunchScreen.storyboard */; }; + 8A395E552187D52400437980 /* ExampleAVPlayerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A395E542187D52400437980 /* ExampleAVPlayerView.swift */; }; + 8A395E572187F04C00437980 /* ExampleAVPlayerSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8A395E562187F04C00437980 /* ExampleAVPlayerSource.swift */; }; + 8AF48A832193FC5B007B1A84 /* ExampleAVPlayerProcessingTap.m in Sources */ = {isa = PBXBuildFile; fileRef = 8AF48A822193FC5B007B1A84 /* ExampleAVPlayerProcessingTap.m */; }; /* End PBXBuildFile section */ /* Begin PBXFileReference section */ - 24A82E5C22B18A48001EFC32 /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = AVPlayerExample/README.md; sourceTree = ""; }; - 8A0A1F141ECD1FFC0008C1AF /* AVPlayerExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; 
path = AVPlayerExample.app; sourceTree = BUILT_PRODUCTS_DIR; }; - 8A0A1F181ECD1FFC0008C1AF /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; - 8A0A1F1A1ECD1FFC0008C1AF /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; - 8A0A1F1B1ECD1FFC0008C1AF /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; - 8A0A1F1D1ECD1FFC0008C1AF /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; - 8A0A1F1E1ECD1FFC0008C1AF /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = ""; }; - 8A0A1F211ECD1FFC0008C1AF /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; - 8A0A1F231ECD1FFC0008C1AF /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; - 8A0A1F261ECD1FFC0008C1AF /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; - 8A0A1F281ECD1FFC0008C1AF /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; - 8A85E4931ECE97F6004719C8 /* Utils.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Utils.h; sourceTree = ""; }; - 8A85E4941ECE97F6004719C8 /* Utils.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = Utils.m; sourceTree = ""; }; - 8A85E4961ECEA82D004719C8 /* AVPlayerView.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AVPlayerView.h; sourceTree = ""; }; - 8A85E4971ECEA82D004719C8 /* AVPlayerView.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AVPlayerView.m; sourceTree = ""; }; + 8A34C1D42189333400F22BE9 /* ExampleAVPlayerAudioTap.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ExampleAVPlayerAudioTap.swift; sourceTree = ""; }; + 8A34C1D72189496A00F22BE9 /* ExampleAVPlayerAudioDevice.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ExampleAVPlayerAudioDevice.h; sourceTree = ""; }; + 8A34C1D82189496A00F22BE9 /* AudioDevices-Bridging-Header.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "AudioDevices-Bridging-Header.h"; sourceTree = ""; }; + 8A34C1D92189496A00F22BE9 /* ExampleAVPlayerAudioDevice.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ExampleAVPlayerAudioDevice.m; sourceTree = ""; }; + 8A395E3F2187D2B200437980 /* AVPlayerExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = AVPlayerExample.app; sourceTree = BUILT_PRODUCTS_DIR; }; + 8A395E422187D2B200437980 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; + 8A395E442187D2B200437980 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; + 8A395E472187D2B200437980 /* Base */ = {isa = PBXFileReference; 
lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; + 8A395E492187D2B300437980 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + 8A395E4C2187D2B300437980 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; + 8A395E4E2187D2B300437980 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + 8A395E542187D52400437980 /* ExampleAVPlayerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ExampleAVPlayerView.swift; sourceTree = ""; }; + 8A395E562187F04C00437980 /* ExampleAVPlayerSource.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ExampleAVPlayerSource.swift; sourceTree = ""; }; + 8AF48A812193FC5B007B1A84 /* ExampleAVPlayerProcessingTap.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ExampleAVPlayerProcessingTap.h; sourceTree = ""; }; + 8AF48A822193FC5B007B1A84 /* ExampleAVPlayerProcessingTap.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ExampleAVPlayerProcessingTap.m; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ - 8A0A1F111ECD1FFC0008C1AF /* Frameworks */ = { + 8A395E3C2187D2B200437980 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( @@ -46,147 +48,147 @@ /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ - 8A0A1F0B1ECD1FFC0008C1AF = { + 8A34C1D62189496A00F22BE9 /* AudioDevices */ = { isa = PBXGroup; children = ( - 8A0A1F161ECD1FFC0008C1AF /* AVPlayerExample */, - 8A0A1F151ECD1FFC0008C1AF /* Products */, - 24A82E5C22B18A48001EFC32 /* README.md */, + 8A34C1D82189496A00F22BE9 /* AudioDevices-Bridging-Header.h */, + 8A34C1D72189496A00F22BE9 /* ExampleAVPlayerAudioDevice.h */, + 8A34C1D92189496A00F22BE9 /* ExampleAVPlayerAudioDevice.m */, + 8AF48A812193FC5B007B1A84 /* ExampleAVPlayerProcessingTap.h */, + 8AF48A822193FC5B007B1A84 /* ExampleAVPlayerProcessingTap.m */, ); + path = AudioDevices; sourceTree = ""; }; - 8A0A1F151ECD1FFC0008C1AF /* Products */ = { + 8A395E362187D2B200437980 = { isa = PBXGroup; children = ( - 8A0A1F141ECD1FFC0008C1AF /* AVPlayerExample.app */, + 8A395E412187D2B200437980 /* AVPlayerExample */, + 8A395E402187D2B200437980 /* Products */, ); - name = Products; sourceTree = ""; }; - 8A0A1F161ECD1FFC0008C1AF /* AVPlayerExample */ = { + 8A395E402187D2B200437980 /* Products */ = { isa = PBXGroup; children = ( - 8A0A1F1A1ECD1FFC0008C1AF /* AppDelegate.h */, - 8A0A1F1B1ECD1FFC0008C1AF /* AppDelegate.m */, - 8A85E4961ECEA82D004719C8 /* AVPlayerView.h */, - 8A85E4971ECEA82D004719C8 /* AVPlayerView.m */, - 8A85E4931ECE97F6004719C8 /* Utils.h */, - 8A85E4941ECE97F6004719C8 /* Utils.m */, - 8A0A1F1D1ECD1FFC0008C1AF /* ViewController.h */, - 8A0A1F1E1ECD1FFC0008C1AF /* ViewController.m */, - 8A0A1F201ECD1FFC0008C1AF /* Main.storyboard */, - 8A0A1F231ECD1FFC0008C1AF /* Assets.xcassets */, - 8A0A1F251ECD1FFC0008C1AF /* LaunchScreen.storyboard */, - 8A0A1F281ECD1FFC0008C1AF /* Info.plist */, - 8A0A1F171ECD1FFC0008C1AF /* Supporting Files */, + 8A395E3F2187D2B200437980 /* AVPlayerExample.app */, ); - path = AVPlayerExample; + name = Products; sourceTree = ""; }; - 8A0A1F171ECD1FFC0008C1AF /* Supporting Files */ = { + 8A395E412187D2B200437980 /* 
AVPlayerExample */ = { isa = PBXGroup; children = ( - 8A0A1F181ECD1FFC0008C1AF /* main.m */, + 8A395E422187D2B200437980 /* AppDelegate.swift */, + 8A34C1D62189496A00F22BE9 /* AudioDevices */, + 8A34C1D42189333400F22BE9 /* ExampleAVPlayerAudioTap.swift */, + 8A395E562187F04C00437980 /* ExampleAVPlayerSource.swift */, + 8A395E542187D52400437980 /* ExampleAVPlayerView.swift */, + 8A395E442187D2B200437980 /* ViewController.swift */, + 8A395E462187D2B200437980 /* Main.storyboard */, + 8A395E492187D2B300437980 /* Assets.xcassets */, + 8A395E4B2187D2B300437980 /* LaunchScreen.storyboard */, + 8A395E4E2187D2B300437980 /* Info.plist */, ); - name = "Supporting Files"; + path = AVPlayerExample; sourceTree = ""; }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ - 8A0A1F131ECD1FFC0008C1AF /* AVPlayerExample */ = { + 8A395E3E2187D2B200437980 /* AVPlayerExample */ = { isa = PBXNativeTarget; - buildConfigurationList = 8A0A1F2B1ECD1FFC0008C1AF /* Build configuration list for PBXNativeTarget "AVPlayerExample" */; + buildConfigurationList = 8A395E512187D2B300437980 /* Build configuration list for PBXNativeTarget "AVPlayerExample" */; buildPhases = ( - 8A0A1F101ECD1FFC0008C1AF /* Sources */, - 8A0A1F111ECD1FFC0008C1AF /* Frameworks */, - 8A0A1F121ECD1FFC0008C1AF /* Resources */, + 8A395E3B2187D2B200437980 /* Sources */, + 8A395E3C2187D2B200437980 /* Frameworks */, + 8A395E3D2187D2B200437980 /* Resources */, ); buildRules = ( ); dependencies = ( ); name = AVPlayerExample; - productName = AVPlayerExample; - productReference = 8A0A1F141ECD1FFC0008C1AF /* AVPlayerExample.app */; + productName = CoViewingExample; + productReference = 8A395E3F2187D2B200437980 /* AVPlayerExample.app */; productType = "com.apple.product-type.application"; }; /* End PBXNativeTarget section */ /* Begin PBXProject section */ - 8A0A1F0C1ECD1FFC0008C1AF /* Project object */ = { + 8A395E372187D2B200437980 /* Project object */ = { isa = PBXProject; attributes = { - LastUpgradeCheck = 0830; + LastSwiftUpdateCheck = 1000; + LastUpgradeCheck = 1000; ORGANIZATIONNAME = "Twilio Inc."; TargetAttributes = { - 8A0A1F131ECD1FFC0008C1AF = { - CreatedOnToolsVersion = 8.3.2; - DevelopmentTeam = SX5J6BN2KX; - ProvisioningStyle = Automatic; + 8A395E3E2187D2B200437980 = { + CreatedOnToolsVersion = 10.0; }; }; }; - buildConfigurationList = 8A0A1F0F1ECD1FFC0008C1AF /* Build configuration list for PBXProject "AVPlayerExample" */; - compatibilityVersion = "Xcode 3.2"; - developmentRegion = English; + buildConfigurationList = 8A395E3A2187D2B200437980 /* Build configuration list for PBXProject "AVPlayerExample" */; + compatibilityVersion = "Xcode 9.3"; + developmentRegion = en; hasScannedForEncodings = 0; knownRegions = ( - English, en, Base, ); - mainGroup = 8A0A1F0B1ECD1FFC0008C1AF; - productRefGroup = 8A0A1F151ECD1FFC0008C1AF /* Products */; + mainGroup = 8A395E362187D2B200437980; + productRefGroup = 8A395E402187D2B200437980 /* Products */; projectDirPath = ""; projectRoot = ""; targets = ( - 8A0A1F131ECD1FFC0008C1AF /* AVPlayerExample */, + 8A395E3E2187D2B200437980 /* AVPlayerExample */, ); }; /* End PBXProject section */ /* Begin PBXResourcesBuildPhase section */ - 8A0A1F121ECD1FFC0008C1AF /* Resources */ = { + 8A395E3D2187D2B200437980 /* Resources */ = { isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( - 8A0A1F271ECD1FFC0008C1AF /* LaunchScreen.storyboard in Resources */, - 8A0A1F241ECD1FFC0008C1AF /* Assets.xcassets in Resources */, - 8A0A1F221ECD1FFC0008C1AF /* Main.storyboard in Resources */, + 
8A395E4D2187D2B300437980 /* LaunchScreen.storyboard in Resources */, + 8A395E4A2187D2B300437980 /* Assets.xcassets in Resources */, + 8A395E482187D2B200437980 /* Main.storyboard in Resources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXResourcesBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ - 8A0A1F101ECD1FFC0008C1AF /* Sources */ = { + 8A395E3B2187D2B200437980 /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( - 8A0A1F1F1ECD1FFC0008C1AF /* ViewController.m in Sources */, - 8A0A1F1C1ECD1FFC0008C1AF /* AppDelegate.m in Sources */, - 8A0A1F191ECD1FFC0008C1AF /* main.m in Sources */, - 8A85E4951ECE97F6004719C8 /* Utils.m in Sources */, - 8A85E4981ECEA82D004719C8 /* AVPlayerView.m in Sources */, + 8A395E452187D2B200437980 /* ViewController.swift in Sources */, + 8A395E432187D2B200437980 /* AppDelegate.swift in Sources */, + 8A395E552187D52400437980 /* ExampleAVPlayerView.swift in Sources */, + 8A34C1D52189333400F22BE9 /* ExampleAVPlayerAudioTap.swift in Sources */, + 8AF48A832193FC5B007B1A84 /* ExampleAVPlayerProcessingTap.m in Sources */, + 8A34C1DA2189496A00F22BE9 /* ExampleAVPlayerAudioDevice.m in Sources */, + 8A395E572187F04C00437980 /* ExampleAVPlayerSource.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXSourcesBuildPhase section */ /* Begin PBXVariantGroup section */ - 8A0A1F201ECD1FFC0008C1AF /* Main.storyboard */ = { + 8A395E462187D2B200437980 /* Main.storyboard */ = { isa = PBXVariantGroup; children = ( - 8A0A1F211ECD1FFC0008C1AF /* Base */, + 8A395E472187D2B200437980 /* Base */, ); name = Main.storyboard; sourceTree = ""; }; - 8A0A1F251ECD1FFC0008C1AF /* LaunchScreen.storyboard */ = { + 8A395E4B2187D2B300437980 /* LaunchScreen.storyboard */ = { isa = PBXVariantGroup; children = ( - 8A0A1F261ECD1FFC0008C1AF /* Base */, + 8A395E4C2187D2B300437980 /* Base */, ); name = LaunchScreen.storyboard; sourceTree = ""; @@ -194,34 +196,44 @@ /* End PBXVariantGroup section */ /* Begin XCBuildConfiguration section */ - 8A0A1F291ECD1FFC0008C1AF /* Debug */ = { + 8A395E4F2187D2B300437980 /* Debug */ = { isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; CLANG_ANALYZER_NONNULL = YES; CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; - CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LANGUAGE_STANDARD = "c++14"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_DOCUMENTATION_COMMENTS = YES; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; - "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = dwarf; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; - 
GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_C_LANGUAGE_STANDARD = gnu11; GCC_DYNAMIC_NO_PIC = NO; GCC_NO_COMMON_BLOCKS = YES; GCC_OPTIMIZATION_LEVEL = 0; @@ -235,42 +247,54 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 10.3; - MTL_ENABLE_DEBUG_INFO = YES; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; - TARGETED_DEVICE_FAMILY = "1,2"; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; }; name = Debug; }; - 8A0A1F2A1ECD1FFC0008C1AF /* Release */ = { + 8A395E502187D2B300437980 /* Release */ = { isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; CLANG_ANALYZER_NONNULL = YES; CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; - CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; + CLANG_CXX_LANGUAGE_STANDARD = "c++14"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_DOCUMENTATION_COMMENTS = YES; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; - "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_NS_ASSERTIONS = NO; ENABLE_STRICT_OBJC_MSGSEND = YES; - GCC_C_LANGUAGE_STANDARD = gnu99; + GCC_C_LANGUAGE_STANDARD = gnu11; GCC_NO_COMMON_BLOCKS = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; @@ -278,62 +302,76 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 10.3; + IPHONEOS_DEPLOYMENT_TARGET = 11.0; MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; SDKROOT = iphoneos; - TARGETED_DEVICE_FAMILY = "1,2"; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_OPTIMIZATION_LEVEL = "-O"; VALIDATE_PRODUCT = YES; }; name = Release; }; - 8A0A1F2C1ECD1FFC0008C1AF /* Debug */ = { + 8A395E522187D2B300437980 /* Debug */ = { isa = XCBuildConfiguration; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CODE_SIGN_STYLE = Automatic; DEVELOPMENT_TEAM = SX5J6BN2KX; INFOPLIST_FILE = AVPlayerExample/Info.plist; - IPHONEOS_DEPLOYMENT_TARGET = 9.0; - LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); PRODUCT_BUNDLE_IDENTIFIER = com.twilio.AVPlayerExample; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OBJC_BRIDGING_HEADER = "AVPlayerExample/AudioDevices/AudioDevices-Bridging-Header.h"; + SWIFT_VERSION = 4.2; + TARGETED_DEVICE_FAMILY = "1,2"; }; name = Debug; }; - 8A0A1F2D1ECD1FFC0008C1AF /* Release 
*/ = { + 8A395E532187D2B300437980 /* Release */ = { isa = XCBuildConfiguration; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + CODE_SIGN_STYLE = Automatic; DEVELOPMENT_TEAM = SX5J6BN2KX; INFOPLIST_FILE = AVPlayerExample/Info.plist; - IPHONEOS_DEPLOYMENT_TARGET = 9.0; - LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); PRODUCT_BUNDLE_IDENTIFIER = com.twilio.AVPlayerExample; PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OBJC_BRIDGING_HEADER = "AVPlayerExample/AudioDevices/AudioDevices-Bridging-Header.h"; + SWIFT_VERSION = 4.2; + TARGETED_DEVICE_FAMILY = "1,2"; }; name = Release; }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ - 8A0A1F0F1ECD1FFC0008C1AF /* Build configuration list for PBXProject "AVPlayerExample" */ = { + 8A395E3A2187D2B200437980 /* Build configuration list for PBXProject "AVPlayerExample" */ = { isa = XCConfigurationList; buildConfigurations = ( - 8A0A1F291ECD1FFC0008C1AF /* Debug */, - 8A0A1F2A1ECD1FFC0008C1AF /* Release */, + 8A395E4F2187D2B300437980 /* Debug */, + 8A395E502187D2B300437980 /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; - 8A0A1F2B1ECD1FFC0008C1AF /* Build configuration list for PBXNativeTarget "AVPlayerExample" */ = { + 8A395E512187D2B300437980 /* Build configuration list for PBXNativeTarget "AVPlayerExample" */ = { isa = XCConfigurationList; buildConfigurations = ( - 8A0A1F2C1ECD1FFC0008C1AF /* Debug */, - 8A0A1F2D1ECD1FFC0008C1AF /* Release */, + 8A395E522187D2B300437980 /* Debug */, + 8A395E532187D2B300437980 /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; /* End XCConfigurationList section */ }; - rootObject = 8A0A1F0C1ECD1FFC0008C1AF /* Project object */; + rootObject = 8A395E372187D2B200437980 /* Project object */; } diff --git a/AVPlayerExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/AVPlayerExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata old mode 100644 new mode 100755 index 3a0c3375..77a38387 --- a/AVPlayerExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata +++ b/AVPlayerExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -2,6 +2,6 @@ + location = "self:CoViewingExample.xcodeproj"> diff --git a/AVPlayerExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/AVPlayerExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100755 index 00000000..18d98100 --- /dev/null +++ b/AVPlayerExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/AVPlayerExample/AVPlayerView.h b/AVPlayerExample/AVPlayerView.h deleted file mode 100644 index 53232048..00000000 --- a/AVPlayerExample/AVPlayerView.h +++ /dev/null @@ -1,16 +0,0 @@ -// -// AVPlayerView.h -// AVPlayerExample -// -// Copyright © 2016-2017 Twilio, Inc. All rights reserved. -// - -#import - -@class AVPlayer; - -@interface AVPlayerView : UIView - -- (instancetype)initWithPlayer:(AVPlayer *)player; - -@end diff --git a/AVPlayerExample/AVPlayerView.m b/AVPlayerExample/AVPlayerView.m deleted file mode 100644 index 62bdf05f..00000000 --- a/AVPlayerExample/AVPlayerView.m +++ /dev/null @@ -1,30 +0,0 @@ -// -// AVPlayerView.m -// AVPlayerExample -// -// Copyright © 2016-2017 Twilio, Inc. All rights reserved. 
-// - -#import "AVPlayerView.h" - -#import - -@implementation AVPlayerView - -- (instancetype)initWithPlayer:(AVPlayer *)player { - self = [super initWithFrame:CGRectZero]; - if (self) { - [self playerLayer].player = player; - } - return self; -} - -+ (Class)layerClass { - return [AVPlayerLayer class]; -} - -- (AVPlayerLayer *)playerLayer { - return (AVPlayerLayer *)self.layer; -} - -@end diff --git a/AVPlayerExample/AppDelegate.h b/AVPlayerExample/AppDelegate.h deleted file mode 100644 index 86fe8949..00000000 --- a/AVPlayerExample/AppDelegate.h +++ /dev/null @@ -1,15 +0,0 @@ -// -// AppDelegate.h -// AVPlayerExample -// -// Copyright © 2016-2017 Twilio, Inc. All rights reserved. -// - -#import - -@interface AppDelegate : UIResponder - -@property (strong, nonatomic) UIWindow *window; - -@end - diff --git a/AVPlayerExample/AppDelegate.m b/AVPlayerExample/AppDelegate.m deleted file mode 100644 index b1274d73..00000000 --- a/AVPlayerExample/AppDelegate.m +++ /dev/null @@ -1,44 +0,0 @@ -// -// AppDelegate.m -// AVPlayerExample -// -// Copyright © 2016-2017 Twilio, Inc. All rights reserved. -// - -#import "AppDelegate.h" - -@interface AppDelegate () - -@end - -@implementation AppDelegate - - -- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { - // Override point for customization after application launch. - return YES; -} - -- (void)applicationWillResignActive:(UIApplication *)application { - // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. - // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. -} - -- (void)applicationDidEnterBackground:(UIApplication *)application { - // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. - // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. -} - -- (void)applicationWillEnterForeground:(UIApplication *)application { - // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. -} - -- (void)applicationDidBecomeActive:(UIApplication *)application { - // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. -} - -- (void)applicationWillTerminate:(UIApplication *)application { - // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. -} - -@end diff --git a/AVPlayerExample/AppDelegate.swift b/AVPlayerExample/AppDelegate.swift new file mode 100755 index 00000000..f8679722 --- /dev/null +++ b/AVPlayerExample/AppDelegate.swift @@ -0,0 +1,59 @@ +// +// AppDelegate.swift +// AVPlayerExample +// +// Copyright © 2018 Twilio Inc. All rights reserved. +// + +import UIKit + +@UIApplicationMain +class AppDelegate: UIResponder, UIApplicationDelegate { + + var window: UIWindow? 
+ + + func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { + print("didFinishLaunchingWithOptions:", launchOptions as Any) + if let options = launchOptions, + let videoUrl = options[UIApplication.LaunchOptionsKey.url] as? URL { + let rootVC = window?.rootViewController as! ViewController + rootVC.connect(contentUrl: videoUrl) + } + return true + } + + func application(_ app: UIApplication, open url: URL, options: [UIApplication.OpenURLOptionsKey : Any] = [:]) -> Bool { + print("app:openURL:", url, " options:", options as Any) + + let rootVC = window?.rootViewController as! ViewController + rootVC.connect(contentUrl: url) + + return true + } + + func applicationWillResignActive(_ application: UIApplication) { + // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. + // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. + } + + func applicationDidEnterBackground(_ application: UIApplication) { + // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. + // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. + } + + func applicationWillEnterForeground(_ application: UIApplication) { + // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. + } + + func applicationDidBecomeActive(_ application: UIApplication) { + // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. + } + + func applicationWillTerminate(_ application: UIApplication) { + // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 
+ } + + +} + diff --git a/AVPlayerExample/Assets.xcassets/AppIcon.appiconset/Contents.json b/AVPlayerExample/Assets.xcassets/AppIcon.appiconset/Contents.json old mode 100644 new mode 100755 index 1d060ed2..d8db8d65 --- a/AVPlayerExample/Assets.xcassets/AppIcon.appiconset/Contents.json +++ b/AVPlayerExample/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -84,6 +84,11 @@ "idiom" : "ipad", "size" : "83.5x83.5", "scale" : "2x" + }, + { + "idiom" : "ios-marketing", + "size" : "1024x1024", + "scale" : "1x" } ], "info" : { diff --git a/AVPlayerExample/Assets.xcassets/Contents.json b/AVPlayerExample/Assets.xcassets/Contents.json new file mode 100755 index 00000000..da4a164c --- /dev/null +++ b/AVPlayerExample/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "version" : 1, + "author" : "xcode" + } +} \ No newline at end of file diff --git a/AVPlayerExample/AudioDevices/AudioDevices-Bridging-Header.h b/AVPlayerExample/AudioDevices/AudioDevices-Bridging-Header.h new file mode 100755 index 00000000..1085fbd8 --- /dev/null +++ b/AVPlayerExample/AudioDevices/AudioDevices-Bridging-Header.h @@ -0,0 +1,8 @@ +// +// AudioDevices-Bridging-Header.h +// AVPlayerExample +// +// Copyright © 2018 Twilio Inc. All rights reserved. +// + +#import "ExampleAVPlayerAudioDevice.h" diff --git a/AVPlayerExample/AudioDevices/ExampleAVPlayerAudioDevice.h b/AVPlayerExample/AudioDevices/ExampleAVPlayerAudioDevice.h new file mode 100755 index 00000000..705682e2 --- /dev/null +++ b/AVPlayerExample/AudioDevices/ExampleAVPlayerAudioDevice.h @@ -0,0 +1,29 @@ +// +// ExampleAVPlayerAudioDevice.h +// AVPlayerExample +// +// Copyright © 2018 Twilio, Inc. All rights reserved. +// + +#import + +/* + * ExampleAVPlayerAudioDevice uses a VoiceProcessingIO audio unit to play audio from an MTAudioProcessingTap + * attached to an AVPlayerItem. The AVPlayer audio is mixed with Room audio provided by Twilio. + * The microphone input, and MTAudioProcessingTap output are mixed into a single recorded stream. + */ +@interface ExampleAVPlayerAudioDevice : NSObject + +- (void)audioTapDidPrepare; + +- (void)startAudioTapAtTime:(CMTime)startTime; + +/* + * Creates a processing tap bound to the device instance. + * + * @return An `MTAudioProcessingTap`, or NULL if there is an error. The caller assumes all ownership + * of the tap, and should call CFRelease when they are finished with it. + */ +- (nullable MTAudioProcessingTapRef)createProcessingTap; + +@end diff --git a/AVPlayerExample/AudioDevices/ExampleAVPlayerAudioDevice.m b/AVPlayerExample/AudioDevices/ExampleAVPlayerAudioDevice.m new file mode 100755 index 00000000..24daa7a6 --- /dev/null +++ b/AVPlayerExample/AudioDevices/ExampleAVPlayerAudioDevice.m @@ -0,0 +1,1167 @@ +// +// ExampleAVPlayerAudioDevice.m +// AVPlayerExample +// +// Copyright © 2018 Twilio, Inc. All rights reserved. +// + +#import "ExampleAVPlayerAudioDevice.h" + +#import "ExampleAVPlayerProcessingTap.h" +#import "TPCircularBuffer+AudioBufferList.h" + +// We want to get as close to 20 msec buffers as possible, to match the behavior of TVIDefaultAudioDevice. +static double const kPreferredIOBufferDuration = 0.01; +// We will use stereo playback where available. Some audio routes may be restricted to mono only. +static size_t const kPreferredNumberOfChannels = 1; +static size_t const kPreferredNumberOfInputChannels = 1; +// An audio sample is a signed 16-bit integer. 
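// At 48 kHz, the 0.01 second kPreferredIOBufferDuration requested above works out to roughly
// 480 of these 16-bit samples per host callback, i.e. about 960 bytes of mono audio.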
+static size_t const kAudioSampleSize = sizeof(SInt16); +static uint32_t const kPreferredSampleRate = 48000; + +typedef struct ExampleAVPlayerRendererContext { + // Used to pull audio from the media engine. + TVIAudioDeviceContext deviceContext; + size_t expectedFramesPerBuffer; + size_t maxFramesPerBuffer; + + // The buffer of AVPlayer content that we will consume. + TPCircularBuffer *playoutBuffer; + AudioTimeStamp playoutStartTimestamp; + AudioTimeStamp playoutSampleTimestamp; +} ExampleAVPlayerRendererContext; + +typedef struct ExampleAVPlayerCapturerContext { + // Used to deliver recorded audio to the media engine. + TVIAudioDeviceContext deviceContext; + size_t expectedFramesPerBuffer; + size_t maxFramesPerBuffer; + + // Core Audio's VoiceProcessingIO audio unit. + AudioUnit audioUnit; + AudioConverterRef audioConverter; + + // Buffer used to render audio samples into. + int16_t *audioBuffer; + + // The buffer of AVPlayer content that we will consume. + TPCircularBuffer *recordingBuffer; +} ExampleAVPlayerCapturerContext; + +// The IO audio units use bus 0 for ouptut, and bus 1 for input. +static int kOutputBus = 0; +static int kInputBus = 1; +// This is the maximum slice size for RemoteIO (as observed in the field). We will double check at initialization time. +static size_t kMaximumFramesPerBuffer = 1156; + +@interface ExampleAVPlayerAudioDevice() + +/** + Indicates that our AVAudioSession and audio graph have been interrupted. When an interruption ends we will take steps + to restart our audio graph. + */ +@property (nonatomic, assign, getter=isInterrupted) BOOL interrupted; + +/** + A multi-channel mixer which takes as input: + + 1. Decoded LPCM audio from Twilio. Remote audio is mixed and pulled from the media engine in `renderingFormat`. + 2. Decoded, format converted LPCM audio consumed from our MTAudioProcessingTap. + + The mixer's output is connected to the input of the VoiceProcessingIO's output bus. + */ +@property (nonatomic, assign) AudioUnit playbackMixer; + +/** + A VoiceProcessingIO audio unit which performs several important functions. + + Input Graph + 1. Record from the microphone. + 2. Echo cancellation of the loudspeaker output from the microphone input. + 3. Deliver mixed, recorded samples from the microphone and AVPlayer to Twilio. + + Output Graph + 1. Pull audio from the output of `playbackMixer`. + + The mixer's output is connected to the input of the VoiceProcessingIO's output bus. + */ +@property (nonatomic, assign) AudioUnit voiceProcessingIO; + + +/** + The tap used to access audio samples from AVPlayer. This is where we produce audio for playback and recording. + */ +@property (nonatomic, assign, nullable) MTAudioProcessingTapRef audioTap; + +/** + A context which contains the state needed for the processing tap's C functions. + */ +@property (nonatomic, assign, nullable) ExampleAVPlayerAudioTapContext *audioTapContext; + +/** + A circular buffer used to feed the recording side of the audio graph with frames produced by our processing tap. + */ +@property (nonatomic, assign, nullable) TPCircularBuffer *audioTapCapturingBuffer; + +/** + A circular buffer used to feed the playback side of the audio graph with frames produced by our processing tap. 
+ */ +@property (nonatomic, assign, nullable) TPCircularBuffer *audioTapRenderingBuffer; + +@property (nonatomic, assign) AudioConverterRef captureConverter; +@property (nonatomic, assign) int16_t *captureBuffer; +@property (nonatomic, strong, nullable) TVIAudioFormat *capturingFormat; +@property (nonatomic, assign, nullable) ExampleAVPlayerCapturerContext *capturingContext; +@property (atomic, assign, nullable) ExampleAVPlayerRendererContext *renderingContext; +@property (nonatomic, strong, nullable) TVIAudioFormat *renderingFormat; + +/** + A convenience getter that indicates if either `wantsCapturing` or `wantsRendering` are true. + */ +@property (nonatomic, assign, readonly) BOOL wantsAudio; + +/** + Indicates that our audio device has been requested to capture audio by Twilio. Capturing occurs when you publish + a TVILocalAudioTrack in a Group Room, or a Peer-to-Peer Room with 1 or more Participant. + */ +@property (nonatomic, assign) BOOL wantsCapturing; + +/** + Indicates that our audio device has been requested to render audio by Twilio. Rendering occurs when one or more Remote + Participants publish a TVIRemoteAudioTrack in a Room. + */ +@property (nonatomic, assign) BOOL wantsRendering; + +@end + +@implementation ExampleAVPlayerAudioDevice + +@synthesize audioTapCapturingBuffer = _audioTapCapturingBuffer; + +#pragma mark - Init & Dealloc + +- (id)init { + self = [super init]; + if (self) { + _audioTapCapturingBuffer = calloc(1, sizeof(TPCircularBuffer)); + _audioTapRenderingBuffer = calloc(1, sizeof(TPCircularBuffer)); + _wantsCapturing = NO; + _wantsRendering = NO; + + _audioTapContext = calloc(1, sizeof(ExampleAVPlayerAudioTapContext)); + _audioTapContext->capturingBuffer = _audioTapCapturingBuffer; + _audioTapContext->renderingBuffer = _audioTapRenderingBuffer; + _audioTapContext->audioDevice = self; + _audioTapContext->audioTapPrepared = NO; + } + return self; +} + +- (void)dealloc { + [self unregisterAVAudioSessionObservers]; + + free(_audioTapCapturingBuffer); + free(_audioTapRenderingBuffer); + free(_audioTapContext); +} + ++ (NSString *)description { + return @"ExampleAVPlayerAudioDevice"; +} + +/* + * Determine at runtime the maximum slice size used by our audio unit. Setting the stream format and sample rate doesn't + * appear to impact the maximum size so we prefer to read this value once at initialization time. 
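 * If the property read fails for any reason, the 1156 frame value observed in the field (declared
 * above) is kept as a conservative fallback.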
+ */ ++ (void)initialize { + AudioComponentDescription audioUnitDescription = [self audioUnitDescription]; + AudioComponent audioComponent = AudioComponentFindNext(NULL, &audioUnitDescription); + AudioUnit audioUnit; + OSStatus status = AudioComponentInstanceNew(audioComponent, &audioUnit); + if (status != 0) { + NSLog(@"Could not find RemoteIO AudioComponent instance!"); + return; + } + + UInt32 framesPerSlice = 0; + UInt32 propertySize = sizeof(framesPerSlice); + status = AudioUnitGetProperty(audioUnit, kAudioUnitProperty_MaximumFramesPerSlice, + kAudioUnitScope_Global, kOutputBus, + &framesPerSlice, &propertySize); + if (status != 0) { + NSLog(@"Could not read RemoteIO AudioComponent instance!"); + AudioComponentInstanceDispose(audioUnit); + return; + } + + NSLog(@"This device uses a maximum slice size of %d frames.", (unsigned int)framesPerSlice); + kMaximumFramesPerBuffer = (size_t)framesPerSlice; + AudioComponentInstanceDispose(audioUnit); +} + +#pragma mark - Public + +- (BOOL)wantsAudio { + return _wantsCapturing || _wantsRendering; +} + +- (void)audioTapDidPrepare { + NSLog(@"%s", __PRETTY_FUNCTION__); +} + +- (void)startAudioTapAtTime:(CMTime)startTime { + @synchronized (self) { + TVIAudioDeviceContext *context = _capturingContext ? _capturingContext->deviceContext : _renderingContext ? _renderingContext->deviceContext : NULL; + if (context) { + TVIAudioDeviceExecuteWorkerBlock(context, ^{ + [self restartAudioUnitAtTime:startTime]; + }); + } + } +} + +- (MTAudioProcessingTapRef)createProcessingTap { + if (_audioTap) { + return _audioTap; + } + + MTAudioProcessingTapRef processingTap; + MTAudioProcessingTapCallbacks callbacks; + callbacks.version = kMTAudioProcessingTapCallbacksVersion_0; + callbacks.init = AVPlayerProcessingTapInit; + callbacks.prepare = AVPlayerProcessingTapPrepare; + callbacks.process = AVPlayerProcessingTapProcess; + callbacks.unprepare = AVPlayerProcessingTapUnprepare; + callbacks.finalize = AVPlayerProcessingTapFinalize; + callbacks.clientInfo = (void *)(_audioTapContext); + + OSStatus status = MTAudioProcessingTapCreate(kCFAllocatorDefault, + &callbacks, + kMTAudioProcessingTapCreationFlag_PostEffects, + &processingTap); + if (status == kCVReturnSuccess) { + _audioTap = processingTap; + return processingTap; + } else { + return NULL; + } +} + +#pragma mark - TVIAudioDeviceRenderer + +- (nullable TVIAudioFormat *)renderFormat { + if (!_renderingFormat) { + // Setup the AVAudioSession early. You could also defer to `startRendering:` and `stopRendering:`. + [self setupAVAudioSession]; + + _renderingFormat = [[self class] activeFormat]; + } + + return _renderingFormat; +} + +- (BOOL)initializeRenderer { + /* + * In this example we don't need any fixed size buffers or other pre-allocated resources. We will simply write + * directly to the AudioBufferList provided in the AudioUnit's rendering callback. + */ + return YES; +} + +- (BOOL)startRendering:(nonnull TVIAudioDeviceContext)context { + NSLog(@"%s %@", __PRETTY_FUNCTION__, self.renderingFormat); + + @synchronized(self) { + // Restart the already setup graph. 
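// Rendering and capturing share a single VoiceProcessingIO unit, so if the capturer already
// built the graph it is torn down here and rebuilt below with both contexts attached.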
+ if (_voiceProcessingIO) { + [self stopAudioUnit]; + [self teardownAudioUnit]; + } + + self.wantsRendering = YES; + if (!self.renderingContext) { + self.renderingContext = malloc(sizeof(ExampleAVPlayerRendererContext)); + memset(self.renderingContext, 0, sizeof(ExampleAVPlayerRendererContext)); + } + self.renderingContext->deviceContext = context; + self.renderingContext->maxFramesPerBuffer = _renderingFormat.framesPerBuffer; + + // Ensure that we wait for the audio tap buffer to become ready. + if (self.audioTapContext->audioTapPrepared) { + self.renderingContext->playoutBuffer = _audioTapRenderingBuffer; + } else { + AudioTimeStamp start = {0}; + start.mFlags = kAudioTimeStampNothingValid; + self.renderingContext->playoutStartTimestamp = start; + self.renderingContext->playoutBuffer = NULL; + } + + const NSTimeInterval sessionBufferDuration = [AVAudioSession sharedInstance].IOBufferDuration; + const double sessionSampleRate = [AVAudioSession sharedInstance].sampleRate; + const size_t sessionFramesPerBuffer = (size_t)(sessionSampleRate * sessionBufferDuration + .5); + self.renderingContext->expectedFramesPerBuffer = sessionFramesPerBuffer; + + if (![self setupAudioUnitRendererContext:self.renderingContext + capturerContext:self.capturingContext]) { + free(self.renderingContext); + self.renderingContext = NULL; + self.wantsRendering = NO; + return NO; + } else if (self.capturingContext) { + self.capturingContext->audioUnit = _voiceProcessingIO; + self.capturingContext->audioConverter = _captureConverter; + } + } + + BOOL success = [self startAudioUnit]; + if (success) { + TVIAudioSessionActivated(context); + } + return success; +} + +- (BOOL)stopRendering { + NSLog(@"%s", __PRETTY_FUNCTION__); + + @synchronized(self) { + NSAssert(self.renderingContext != NULL, @"We should have a rendering context when stopping."); + self.wantsRendering = NO; + + if (!self.wantsAudio) { + [self stopAudioUnit]; + TVIAudioSessionDeactivated(self.renderingContext->deviceContext); + [self teardownAudioUnit]; + + free(self.capturingContext); + self.capturingContext = NULL; + + free(self.captureBuffer); + self.captureBuffer = NULL; + + free(self.renderingContext); + self.renderingContext = NULL; + } + } + + return YES; +} + +#pragma mark - TVIAudioDeviceCapturer + +- (nullable TVIAudioFormat *)captureFormat { + if (!_capturingFormat) { + + /* + * Assume that the AVAudioSession has already been configured and started and that the values + * for sampleRate and IOBufferDuration are final. + */ + _capturingFormat = [[self class] capturingFormat]; + } + + return _capturingFormat; +} + +- (BOOL)initializeCapturer { + if (_captureBuffer == NULL) { + size_t byteSize = kMaximumFramesPerBuffer * 4 * 2; + byteSize += 16; + _captureBuffer = malloc(byteSize); + } + + return YES; +} + +- (BOOL)startCapturing:(nonnull TVIAudioDeviceContext)context { + NSLog(@"%s %@", __PRETTY_FUNCTION__, self.capturingFormat); + + @synchronized(self) { + // Restart the already setup graph. + if (_voiceProcessingIO) { + [self stopAudioUnit]; + [self teardownAudioUnit]; + } + + self.wantsCapturing = YES; + if (!self.capturingContext) { + self.capturingContext = malloc(sizeof(ExampleAVPlayerCapturerContext)); + memset(self.capturingContext, 0, sizeof(ExampleAVPlayerCapturerContext)); + } + self.capturingContext->deviceContext = context; + self.capturingContext->maxFramesPerBuffer = _capturingFormat.framesPerBuffer; + self.capturingContext->audioBuffer = _captureBuffer; + + // Ensure that we wait for the audio tap buffer to become ready. 
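// The tap buffers only exist once the caller has created the tap with -createProcessingTap and
// installed it on an AVPlayerItem, which presumably flips audioTapPrepared from the tap's prepare
// callback. A minimal caller-side sketch, using illustrative names that are not part of this diff:
//
//   MTAudioProcessingTapRef tap = [audioDevice createProcessingTap];
//   AVMutableAudioMixInputParameters *parameters =
//       [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:audioAssetTrack];
//   parameters.audioTapProcessor = tap;  // The audio mix retains the tap.
//   AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
//   audioMix.inputParameters = @[parameters];
//   playerItem.audioMix = audioMix;
//   CFRelease(tap);                      // Per the header, the caller owns the returned reference.
//
// Until that happens, playoutBuffer stays NULL and the tap playback callback renders silence.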
+ if (self.audioTapContext->audioTapPrepared) { + self.capturingContext->recordingBuffer = _audioTapCapturingBuffer; + } else { + self.capturingContext->recordingBuffer = NULL; + } + + const NSTimeInterval sessionBufferDuration = [AVAudioSession sharedInstance].IOBufferDuration; + const double sessionSampleRate = [AVAudioSession sharedInstance].sampleRate; + const size_t sessionFramesPerBuffer = (size_t)(sessionSampleRate * sessionBufferDuration + .5); + self.capturingContext->expectedFramesPerBuffer = sessionFramesPerBuffer; + + if (![self setupAudioUnitRendererContext:self.renderingContext + capturerContext:self.capturingContext]) { + free(self.capturingContext); + self.capturingContext = NULL; + self.wantsCapturing = NO; + return NO; + } else { + self.capturingContext->audioUnit = _voiceProcessingIO; + self.capturingContext->audioConverter = _captureConverter; + } + } + BOOL success = [self startAudioUnit]; + if (success) { + TVIAudioSessionActivated(context); + } + return success; +} + +- (BOOL)stopCapturing { + NSLog(@"%s", __PRETTY_FUNCTION__); + + @synchronized (self) { + NSAssert(self.capturingContext != NULL, @"We should have a capturing context when stopping."); + self.wantsCapturing = NO; + + if (!self.wantsAudio) { + [self stopAudioUnit]; + TVIAudioSessionDeactivated(self.capturingContext->deviceContext); + [self teardownAudioUnit]; + + free(self.capturingContext); + self.capturingContext = NULL; + + free(self.captureBuffer); + self.captureBuffer = NULL; + + free(self.renderingContext); + self.renderingContext = NULL; + } + } + return YES; +} + +#pragma mark - Private (AudioUnit callbacks) + +static void ExampleAVPlayerAudioDeviceDequeueFrames(TPCircularBuffer *buffer, + UInt32 numFrames, + const AudioTimeStamp *timestamp, + AudioBufferList *bufferList) { + int8_t *audioBuffer = (int8_t *)bufferList->mBuffers[0].mData; + + // TODO: Include this format in the context? What if the formats are somehow not matched? + AudioStreamBasicDescription format = {0}; + format.mBitsPerChannel = 16; + format.mChannelsPerFrame = bufferList->mBuffers[0].mNumberChannels; + format.mBytesPerFrame = format.mChannelsPerFrame * format.mBitsPerChannel / 8; + format.mFormatID = kAudioFormatLinearPCM; + format.mFormatFlags = kAudioFormatFlagIsPacked | kAudioFormatFlagIsSignedInteger; + format.mSampleRate = kPreferredSampleRate; + + UInt32 framesInOut = numFrames; + if (timestamp) { + AudioTimeStamp dequeuedTimestamp; + do { + TPCircularBufferDequeueBufferListFrames(buffer, &framesInOut, bufferList, &dequeuedTimestamp, &format); + } while (dequeuedTimestamp.mSampleTime < timestamp->mSampleTime); + } else { + TPCircularBufferDequeueBufferListFrames(buffer, &framesInOut, bufferList, NULL, &format); + } + + if (framesInOut != numFrames) { + // Render silence for the remaining frames. 
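// TPCircularBufferDequeueBufferListFrames updates framesInOut with the number of frames it actually
// copied, so any shortfall (an underrun of the tap's circular buffer) is zero-filled below rather
// than left as stale data.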
+ UInt32 framesRemaining = numFrames - framesInOut; + UInt32 bytesRemaining = framesRemaining * format.mBytesPerFrame; + audioBuffer += format.mBytesPerFrame * framesInOut; + + memset(audioBuffer, 0, bytesRemaining); + } +} + +static OSStatus ExampleAVPlayerAudioDeviceAudioTapPlaybackCallback(void *refCon, + AudioUnitRenderActionFlags *actionFlags, + const AudioTimeStamp *timestamp, + UInt32 busNumber, + UInt32 numFrames, + AudioBufferList *bufferList) { + assert(bufferList->mNumberBuffers == 1); + assert(bufferList->mBuffers[0].mNumberChannels <= 2); + assert(bufferList->mBuffers[0].mNumberChannels > 0); + + ExampleAVPlayerRendererContext *context = (ExampleAVPlayerRendererContext *)refCon; + AudioTimeStamp startTimestamp = context->playoutStartTimestamp; + BOOL readyToPlay = (startTimestamp.mFlags & kAudioTimeStampHostTimeValid) && (timestamp->mHostTime >= startTimestamp.mHostTime); + TPCircularBuffer *buffer = context->playoutBuffer; + UInt32 audioBufferSizeInBytes = bufferList->mBuffers[0].mDataByteSize; + + // Render silence if there are temporary mismatches between CoreAudio and our rendering format. + if (numFrames > context->maxFramesPerBuffer) { + NSLog(@"Can handle a max of %u frames but got %u.", (unsigned int)context->maxFramesPerBuffer, (unsigned int)numFrames); + *actionFlags |= kAudioUnitRenderAction_OutputIsSilence; + int8_t *audioBuffer = (int8_t *)bufferList->mBuffers[0].mData; + memset(audioBuffer, 0, audioBufferSizeInBytes); + return noErr; + } else if (buffer == nil || + !readyToPlay) { + *actionFlags |= kAudioUnitRenderAction_OutputIsSilence; + memset(bufferList->mBuffers[0].mData, 0, audioBufferSizeInBytes); + return noErr; + } + + if (readyToPlay && context->playoutStartTimestamp.mSampleTime == 0) { + ExampleAVPlayerAudioDeviceDequeueFrames(buffer, numFrames, &context->playoutStartTimestamp, bufferList); + context->playoutStartTimestamp.mSampleTime += 1; + } else { + ExampleAVPlayerAudioDeviceDequeueFrames(buffer, numFrames, NULL, bufferList); + } + + return noErr; +} + +static OSStatus ExampleAVPlayerAudioDeviceAudioRendererPlaybackCallback(void *refCon, + AudioUnitRenderActionFlags *actionFlags, + const AudioTimeStamp *timestamp, + UInt32 busNumber, + UInt32 numFrames, + AudioBufferList *bufferList) { + assert(bufferList->mNumberBuffers == 1); + assert(bufferList->mBuffers[0].mNumberChannels <= 2); + assert(bufferList->mBuffers[0].mNumberChannels > 0); + + ExampleAVPlayerCapturerContext *context = (ExampleAVPlayerCapturerContext *)refCon; + int8_t *audioBuffer = (int8_t *)bufferList->mBuffers[0].mData; + UInt32 audioBufferSizeInBytes = bufferList->mBuffers[0].mDataByteSize; + + // Render silence if there are temporary mismatches between CoreAudio and our rendering format. + if (numFrames > context->maxFramesPerBuffer) { + NSLog(@"Can handle a max of %u frames but got %u.", (unsigned int)context->maxFramesPerBuffer, (unsigned int)numFrames); + *actionFlags |= kAudioUnitRenderAction_OutputIsSilence; + memset(audioBuffer, 0, audioBufferSizeInBytes); + return noErr; + } + + // Pull decoded, mixed audio data from the media engine into the AudioUnit's AudioBufferList. 
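// TVIAudioDeviceReadRenderData fills the buffer with interleaved 16-bit samples in the format
// negotiated via -renderFormat; the asserts below document that the host-supplied buffer matches
// that contract.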
assert(numFrames <= context->maxFramesPerBuffer); + assert(audioBufferSizeInBytes == (bufferList->mBuffers[0].mNumberChannels * kAudioSampleSize * numFrames)); + TVIAudioDeviceReadRenderData(context->deviceContext, audioBuffer, audioBufferSizeInBytes); + + return noErr; +} + +static OSStatus ExampleAVPlayerAudioDeviceRecordingInputCallback(void *refCon, + AudioUnitRenderActionFlags *actionFlags, + const AudioTimeStamp *timestamp, + UInt32 busNumber, + UInt32 numFrames, + AudioBufferList *bufferList) { + ExampleAVPlayerCapturerContext *context = (ExampleAVPlayerCapturerContext *)refCon; + if (context->deviceContext == NULL) { + return noErr; + } + + if (numFrames > context->maxFramesPerBuffer) { + NSLog(@"Expected %u frames but got %u.", (unsigned int)context->maxFramesPerBuffer, (unsigned int)numFrames); + return noErr; + } + + + // Render input into the IO Unit's internal buffer. + AudioBufferList microphoneBufferList; + microphoneBufferList.mNumberBuffers = 1; + + AudioBuffer *microphoneAudioBuffer = &microphoneBufferList.mBuffers[0]; + microphoneAudioBuffer->mNumberChannels = 1; + microphoneAudioBuffer->mDataByteSize = (UInt32)numFrames * 2; + microphoneAudioBuffer->mData = NULL; + + OSStatus status = AudioUnitRender(context->audioUnit, + actionFlags, + timestamp, + busNumber, + numFrames, + &microphoneBufferList); + if (status != noErr) { + return status; + } + + // Early return with microphone only recording. + if (context->deviceContext) { + TVIAudioDeviceWriteCaptureData(context->deviceContext, + microphoneAudioBuffer->mData, + microphoneAudioBuffer->mDataByteSize); + } + return noErr; +} + +#pragma mark - Private (AVAudioSession and CoreAudio) + ++ (nonnull TVIAudioFormat *)capturingFormat { + /* + * Use the pre-determined maximum frame size. AudioUnit callbacks are variable, and in most situations will be close + * to the `AVAudioSession.preferredIOBufferDuration` that we've requested. + */ + return [[TVIAudioFormat alloc] initWithChannels:kPreferredNumberOfChannels + sampleRate:kPreferredSampleRate + framesPerBuffer:kMaximumFramesPerBuffer]; +} + ++ (nullable TVIAudioFormat *)activeFormat { + /* + * Use the pre-determined maximum frame size. AudioUnit callbacks are variable, and in most situations will be close + * to the `AVAudioSession.preferredIOBufferDuration` that we've requested. + */ + const size_t sessionFramesPerBuffer = kMaximumFramesPerBuffer; + const double sessionSampleRate = [AVAudioSession sharedInstance].sampleRate; + const NSInteger sessionOutputChannels = [AVAudioSession sharedInstance].outputNumberOfChannels; + size_t rendererChannels = sessionOutputChannels >= TVIAudioChannelsStereo ?
TVIAudioChannelsStereo : TVIAudioChannelsMono; + + return [[TVIAudioFormat alloc] initWithChannels:rendererChannels + sampleRate:sessionSampleRate + framesPerBuffer:sessionFramesPerBuffer]; +} + ++ (AudioComponentDescription)audioUnitDescription { + AudioComponentDescription audioUnitDescription; + audioUnitDescription.componentType = kAudioUnitType_Output; + audioUnitDescription.componentSubType = kAudioUnitSubType_VoiceProcessingIO; + audioUnitDescription.componentManufacturer = kAudioUnitManufacturer_Apple; + audioUnitDescription.componentFlags = 0; + audioUnitDescription.componentFlagsMask = 0; + return audioUnitDescription; +} + ++ (AudioComponentDescription)mixerAudioCompontentDescription { + AudioComponentDescription audioUnitDescription; + audioUnitDescription.componentType = kAudioUnitType_Mixer; + audioUnitDescription.componentSubType = kAudioUnitSubType_MultiChannelMixer; + audioUnitDescription.componentManufacturer = kAudioUnitManufacturer_Apple; + audioUnitDescription.componentFlags = 0; + audioUnitDescription.componentFlagsMask = 0; + return audioUnitDescription; +} + ++ (AudioComponentDescription)genericOutputAudioCompontentDescription { + AudioComponentDescription audioUnitDescription; + audioUnitDescription.componentType = kAudioUnitType_Output; + audioUnitDescription.componentSubType = kAudioUnitSubType_GenericOutput; + audioUnitDescription.componentManufacturer = kAudioUnitManufacturer_Apple; + audioUnitDescription.componentFlags = 0; + audioUnitDescription.componentFlagsMask = 0; + return audioUnitDescription; +} + +- (void)setupAVAudioSession { + AVAudioSession *session = [AVAudioSession sharedInstance]; + NSError *error = nil; + + if (![session setPreferredSampleRate:kPreferredSampleRate error:&error]) { + NSLog(@"Error setting sample rate: %@", error); + } + + size_t preferredOutputChannels = session.outputNumberOfChannels >= kPreferredNumberOfChannels ? kPreferredNumberOfChannels : session.outputNumberOfChannels; + if (![session setPreferredOutputNumberOfChannels:preferredOutputChannels error:&error]) { + NSLog(@"Error setting number of output channels to %zu: %@", preferredOutputChannels, error); + } + + /* + * We want to be as close as possible to the buffer size that the media engine needs. If there is + * a mismatch then TwilioVideo will ensure that appropriately sized audio buffers are delivered. 
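 * For example, a Bluetooth HFP route may only honor a coarser duration than the 0.01 s preferred
 * below; any such mismatch is reconciled by the media engine rather than by this device.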
+ */ + if (![session setPreferredIOBufferDuration:kPreferredIOBufferDuration error:&error]) { + NSLog(@"Error setting IOBuffer duration: %@", error); + } + + if (![session setCategory:AVAudioSessionCategoryPlayAndRecord error:&error]) { + NSLog(@"Error setting session category: %@", error); + } + + if (![session setMode:AVAudioSessionModeVideoChat error:&error]) { + NSLog(@"Error setting session mode: %@", error); + } + + [self registerAVAudioSessionObservers]; + + if (![session setActive:YES error:&error]) { + NSLog(@"Error activating AVAudioSession: %@", error); + } + + if (![session setPreferredInputNumberOfChannels:kPreferredNumberOfInputChannels error:&error]) { + NSLog(@"Error setting preferred number of input channels to %zu: %@", kPreferredNumberOfInputChannels, error); + } +} + +- (AudioStreamBasicDescription)microphoneInputStreamDescription { + AudioStreamBasicDescription formatDescription = self.capturingFormat.streamDescription; + formatDescription.mBytesPerFrame = 2; + formatDescription.mBytesPerPacket = 2; + formatDescription.mChannelsPerFrame = 1; + return formatDescription; +} + +- (AudioStreamBasicDescription)nonInterleavedStereoStreamDescription { + AudioStreamBasicDescription formatDescription = self.capturingFormat.streamDescription; + formatDescription.mBytesPerFrame = 2; + formatDescription.mBytesPerPacket = 2; + formatDescription.mChannelsPerFrame = 2; + formatDescription.mFormatFlags |= kAudioFormatFlagIsNonInterleaved; + return formatDescription; +} + +- (OSStatus)setupAudioCapturer:(ExampleAVPlayerCapturerContext *)capturerContext { + UInt32 enableInput = capturerContext ? 1 : 0; + OSStatus status = AudioUnitSetProperty(_voiceProcessingIO, kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Input, kInputBus, &enableInput, + sizeof(enableInput)); + + if (status != noErr) { + NSLog(@"Could not enable/disable input bus!"); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + return status; + } else if (!enableInput) { + // Input is not required. + return noErr; + } + + // Request mono audio capture regardless of hardware. + AudioStreamBasicDescription capturingFormatDescription = [self microphoneInputStreamDescription]; + + // Our converter will interleave the mono microphone input and player audio in one stereo stream. + if (_captureConverter == NULL) { + AudioStreamBasicDescription sourceFormat = [self nonInterleavedStereoStreamDescription]; + AudioStreamBasicDescription destinationFormat = [self.capturingFormat streamDescription]; + OSStatus status = AudioConverterNew(&sourceFormat, + &destinationFormat, + &_captureConverter); + if (status != noErr) { + NSLog(@"Could not create capture converter! code: %d", status); + return status; + } + } + + status = AudioUnitSetProperty(_voiceProcessingIO, kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Output, kInputBus, + &capturingFormatDescription, sizeof(capturingFormatDescription)); + if (status != noErr) { + NSLog(@"Could not set stream format on the input bus!"); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + return status; + } + + // Setup the I/O input callback.
+ AURenderCallbackStruct capturerCallback; + capturerCallback.inputProc = ExampleAVPlayerAudioDeviceRecordingInputCallback; + capturerCallback.inputProcRefCon = (void *)(capturerContext); + status = AudioUnitSetProperty(_voiceProcessingIO, kAudioOutputUnitProperty_SetInputCallback, + kAudioUnitScope_Global, kInputBus, &capturerCallback, + sizeof(capturerCallback)); + if (status != noErr) { + NSLog(@"Could not set capturing callback!"); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + return status; + } + + return status; +} + +- (BOOL)setupAudioUnitRendererContext:(ExampleAVPlayerRendererContext *)rendererContext + capturerContext:(ExampleAVPlayerCapturerContext *)capturerContext { + AudioComponentDescription audioUnitDescription = [[self class] audioUnitDescription]; + AudioComponent audioComponent = AudioComponentFindNext(NULL, &audioUnitDescription); + + OSStatus status = AudioComponentInstanceNew(audioComponent, &_voiceProcessingIO); + if (status != noErr) { + NSLog(@"Could not find the AudioComponent instance!"); + return NO; + } + + /* + * Configure the VoiceProcessingIO audio unit. Our rendering format attempts to match what AVAudioSession requires to + * prevent any additional format conversions after the media engine has mixed our playout audio. + */ + UInt32 enableOutput = rendererContext ? 1 : 0; + status = AudioUnitSetProperty(_voiceProcessingIO, kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Output, kOutputBus, + &enableOutput, sizeof(enableOutput)); + if (status != noErr) { + NSLog(@"Could not enable/disable output bus!"); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + return NO; + } + + if (enableOutput) { + AudioStreamBasicDescription renderingFormatDescription = self.renderingFormat.streamDescription; + AudioStreamBasicDescription playerFormatDescription = renderingFormatDescription; + if (self.renderingContext->playoutBuffer) { + playerFormatDescription = self.audioTapContext->renderingFormat; + } + + // Setup playback mixer. + AudioComponentDescription mixerComponentDescription = [[self class] mixerAudioCompontentDescription]; + AudioComponent mixerComponent = AudioComponentFindNext(NULL, &mixerComponentDescription); + + OSStatus status = AudioComponentInstanceNew(mixerComponent, &_playbackMixer); + if (status != noErr) { + NSLog(@"Could not find the mixer AudioComponent instance!"); + return NO; + } + + // Configure the mixer's output format. 
+ status = AudioUnitSetProperty(_playbackMixer, kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Output, kOutputBus, + &renderingFormatDescription, sizeof(renderingFormatDescription)); + if (status != noErr) { + NSLog(@"Could not set stream format on the mixer output bus!"); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + return NO; + } + + status = AudioUnitSetProperty(_playbackMixer, kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Input, 0, + &playerFormatDescription, sizeof(playerFormatDescription)); + if (status != noErr) { + NSLog(@"Could not set stream format on the mixer input bus 0!"); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + return NO; + } + + status = AudioUnitSetProperty(_playbackMixer, kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Input, 1, + &renderingFormatDescription, sizeof(renderingFormatDescription)); + if (status != noErr) { + NSLog(@"Could not set stream format on the mixer input bus 1!"); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + return NO; + } + + // Connection: Mixer Output 0 -> VoiceProcessingIO Input Scope, Output Bus + AudioUnitConnection mixerOutputConnection; + mixerOutputConnection.sourceAudioUnit = _playbackMixer; + mixerOutputConnection.sourceOutputNumber = kOutputBus; + mixerOutputConnection.destInputNumber = kOutputBus; + + status = AudioUnitSetProperty(_voiceProcessingIO, kAudioUnitProperty_MakeConnection, + kAudioUnitScope_Input, kOutputBus, + &mixerOutputConnection, sizeof(mixerOutputConnection)); + if (status != noErr) { + NSLog(@"Could not connect the mixer output to voice processing input!"); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + return NO; + } + + status = AudioUnitSetProperty(_voiceProcessingIO, kAudioUnitProperty_StreamFormat, + kAudioUnitScope_Input, kOutputBus, + &renderingFormatDescription, sizeof(renderingFormatDescription)); + if (status != noErr) { + NSLog(@"Could not set stream format on the output bus!"); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + return NO; + } + + // Setup the rendering callbacks. 
+ UInt32 elementCount = 2; + status = AudioUnitSetProperty(_playbackMixer, kAudioUnitProperty_ElementCount, + kAudioUnitScope_Input, 0, &elementCount, + sizeof(elementCount)); + if (status != 0) { + NSLog(@"Could not set input element count!"); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + return NO; + } + + AURenderCallbackStruct audioTapRenderCallback; + audioTapRenderCallback.inputProc = ExampleAVPlayerAudioDeviceAudioTapPlaybackCallback; + audioTapRenderCallback.inputProcRefCon = (void *)(rendererContext); + status = AudioUnitSetProperty(_playbackMixer, kAudioUnitProperty_SetRenderCallback, + kAudioUnitScope_Input, 0, &audioTapRenderCallback, + sizeof(audioTapRenderCallback)); + if (status != 0) { + NSLog(@"Could not set audio tap rendering callback!"); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + return NO; + } + + AURenderCallbackStruct audioRendererRenderCallback; + audioRendererRenderCallback.inputProc = ExampleAVPlayerAudioDeviceAudioRendererPlaybackCallback; + audioRendererRenderCallback.inputProcRefCon = (void *)(rendererContext); + status = AudioUnitSetProperty(_playbackMixer, kAudioUnitProperty_SetRenderCallback, + kAudioUnitScope_Input, 1, &audioRendererRenderCallback, + sizeof(audioRendererRenderCallback)); + if (status != 0) { + NSLog(@"Could not set audio renderer rendering callback!"); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + return NO; + } + } + + [self setupAudioCapturer:self.capturingContext]; + + // Finally, initialize the IO audio unit and mixer (if present). + status = AudioUnitInitialize(_voiceProcessingIO); + if (status != noErr) { + NSLog(@"Could not initialize the audio unit!"); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + return NO; + } + + if (_playbackMixer) { + status = AudioUnitInitialize(_playbackMixer); + if (status != noErr) { + NSLog(@"Could not initialize the playback mixer audio unit!"); + AudioComponentInstanceDispose(_playbackMixer); + _playbackMixer = NULL; + return NO; + } + } + + return YES; +} + +- (BOOL)startAudioUnit { + OSStatus status = AudioOutputUnitStart(_voiceProcessingIO); + if (status != noErr) { + NSLog(@"Could not start the audio unit. code: %d", status); + return NO; + } + + return YES; +} + +- (BOOL)stopAudioUnit { + OSStatus status = AudioOutputUnitStop(_voiceProcessingIO); + if (status != noErr) { + NSLog(@"Could not stop the audio unit. code: %d", status); + return NO; + } + + return YES; +} + +- (void)teardownAudioUnit { + if (_voiceProcessingIO) { + AudioUnitUninitialize(_voiceProcessingIO); + AudioComponentInstanceDispose(_voiceProcessingIO); + _voiceProcessingIO = NULL; + } + + if (_playbackMixer) { + AudioUnitUninitialize(_playbackMixer); + AudioComponentInstanceDispose(_playbackMixer); + _playbackMixer = NULL; + } + + if (_captureConverter != NULL) { + AudioConverterDispose(_captureConverter); + _captureConverter = NULL; + } +} + +- (void)restartAudioUnitAtTime:(CMTime)startTime { + BOOL restart = NO; + + AudioTimeStamp startTimestamp = {0}; + startTimestamp.mFlags = kAudioTimeStampHostTimeValid; + startTimestamp.mHostTime = CMClockConvertHostTimeToSystemUnits(startTime); + self.renderingContext->playoutStartTimestamp = startTimestamp; + + // TODO: Assumption, pass as an arg using the asset's current time and audio timescale?
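// One possible refinement for the TODO above (an assumption, not part of this sample): seed the
// sample time built below from the player's current position instead of always restarting at zero.
// `playerTime` is a hypothetical CMTime argument that the caller would pass in:
//   sampleTimestamp.mSampleTime = CMTimeGetSeconds(playerTime) * kPreferredSampleRate;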
+ AudioTimeStamp sampleTimestamp = {0}; + sampleTimestamp.mFlags = kAudioTimeStampSampleTimeValid; + sampleTimestamp.mSampleTime = 0; + + @synchronized (self) { + if (self.wantsAudio) { + restart = YES; + [self stopAudioUnit]; + [self teardownAudioUnit]; + if (self.renderingContext) { + self.renderingContext->playoutBuffer = _audioTapRenderingBuffer; + self.renderingContext->playoutSampleTimestamp = sampleTimestamp; + } + if (self.capturingContext) { + self.capturingContext->recordingBuffer = _audioTapCapturingBuffer; + } + if ([self setupAudioUnitRendererContext:self.renderingContext + capturerContext:self.capturingContext]) { + if (self.capturingContext) { + self.capturingContext->audioUnit = _voiceProcessingIO; + self.capturingContext->audioConverter = _captureConverter; + } + } else { + return; + } + } + } + + [self startAudioUnit]; +} + +#pragma mark - NSNotification Observers + +- (void)registerAVAudioSessionObservers { + // An audio device that interacts with AVAudioSession should handle events like interruptions and route changes. + NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; + + [center addObserver:self selector:@selector(handleAudioInterruption:) name:AVAudioSessionInterruptionNotification object:nil]; + /* + * Interruption handling is different on iOS 9.x. If your application becomes interrupted while it is in the + * background then you will not get a corresponding notification when the interruption ends. We workaround this + * by handling UIApplicationDidBecomeActiveNotification and treating it as an interruption end. + */ + if (![[NSProcessInfo processInfo] isOperatingSystemAtLeastVersion:(NSOperatingSystemVersion){10, 0, 0}]) { + [center addObserver:self selector:@selector(handleApplicationDidBecomeActive:) name:UIApplicationDidBecomeActiveNotification object:nil]; + } + + [center addObserver:self selector:@selector(handleRouteChange:) name:AVAudioSessionRouteChangeNotification object:nil]; + [center addObserver:self selector:@selector(handleMediaServiceLost:) name:AVAudioSessionMediaServicesWereLostNotification object:nil]; + [center addObserver:self selector:@selector(handleMediaServiceRestored:) name:AVAudioSessionMediaServicesWereResetNotification object:nil]; +} + +- (void)handleAudioInterruption:(NSNotification *)notification { + AVAudioSessionInterruptionType type = [notification.userInfo[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue]; + + @synchronized(self) { + // TODO: Multiple contexts. + // If the worker block is executed, then context is guaranteed to be valid. + TVIAudioDeviceContext context = self.renderingContext ? self.renderingContext->deviceContext : NULL; + if (context) { + TVIAudioDeviceExecuteWorkerBlock(context, ^{ + if (type == AVAudioSessionInterruptionTypeBegan) { + NSLog(@"Interruption began."); + self.interrupted = YES; + [self stopAudioUnit]; + TVIAudioSessionDeactivated(context); + } else { + NSLog(@"Interruption ended."); + self.interrupted = NO; + if ([self startAudioUnit]) { + TVIAudioSessionActivated(context); + } + } + }); + } + } +} + +- (void)handleApplicationDidBecomeActive:(NSNotification *)notification { + @synchronized(self) { + // If the worker block is executed, then context is guaranteed to be valid. + TVIAudioDeviceContext context = self.renderingContext ? 
self.renderingContext->deviceContext : NULL; + if (context) { + TVIAudioDeviceExecuteWorkerBlock(context, ^{ + if (self.isInterrupted) { + NSLog(@"Synthesizing an interruption ended event for iOS 9.x devices."); + self.interrupted = NO; + if ([self startAudioUnit]) { + TVIAudioSessionActivated(context); + } + } + }); + } + } +} + +- (void)handleRouteChange:(NSNotification *)notification { + // Check if the sample rate, or channels changed and trigger a format change if it did. + AVAudioSessionRouteChangeReason reason = [notification.userInfo[AVAudioSessionRouteChangeReasonKey] unsignedIntegerValue]; + + switch (reason) { + case AVAudioSessionRouteChangeReasonUnknown: + case AVAudioSessionRouteChangeReasonNewDeviceAvailable: + case AVAudioSessionRouteChangeReasonOldDeviceUnavailable: + // Each device change might cause the actual sample rate or channel configuration of the session to change. + case AVAudioSessionRouteChangeReasonCategoryChange: + // In iOS 9.2+ switching routes from a BT device in control center may cause a category change. + case AVAudioSessionRouteChangeReasonOverride: + case AVAudioSessionRouteChangeReasonWakeFromSleep: + case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory: + case AVAudioSessionRouteChangeReasonRouteConfigurationChange: + // With CallKit, AVAudioSession may change the sample rate during a configuration change. + // If a valid route change occurs we may want to update our audio graph to reflect the new output device. + @synchronized(self) { + // TODO: Contexts + if (self.renderingContext) { + TVIAudioDeviceExecuteWorkerBlock(self.renderingContext->deviceContext, ^{ + [self handleValidRouteChange]; + }); + } + } + break; + } +} + +- (void)handleValidRouteChange { + // Nothing to process while we are interrupted. We will interrogate the AVAudioSession once the interruption ends. + if (self.isInterrupted) { + return; + } else if (_voiceProcessingIO == NULL) { + return; + } + + NSLog(@"A route change ocurred while the AudioUnit was started. Checking the active audio format."); + + // Determine if the format actually changed. We only care about sample rate and number of channels. + TVIAudioFormat *activeFormat = [[self class] activeFormat]; + + if (![activeFormat isEqual:_renderingFormat]) { + NSLog(@"The rendering format changed. Restarting with %@", activeFormat); + // Signal a change by clearing our cached format, and allowing TVIAudioDevice to drive the process. + _renderingFormat = nil; + + @synchronized(self) { + if (self.renderingContext) { + TVIAudioDeviceFormatChanged(self.renderingContext->deviceContext); + } else if (self.capturingContext) { + TVIAudioDeviceFormatChanged(self.capturingContext->deviceContext); + } + } + } +} + +- (void)handleMediaServiceLost:(NSNotification *)notification { + @synchronized(self) { + // TODO: Contexts. + if (self.renderingContext) { + TVIAudioDeviceExecuteWorkerBlock(self.renderingContext->deviceContext, ^{ + [self stopAudioUnit]; + TVIAudioSessionDeactivated(self.renderingContext->deviceContext); + }); + } + } +} + +- (void)handleMediaServiceRestored:(NSNotification *)notification { + @synchronized(self) { + // If the worker block is executed, then context is guaranteed to be valid. + TVIAudioDeviceContext context = self.renderingContext ? 
self.renderingContext->deviceContext : NULL; + if (context) { + TVIAudioDeviceExecuteWorkerBlock(context, ^{ + if ([self startAudioUnit]) { + TVIAudioSessionActivated(context); + } + }); + } + } +} + +- (void)unregisterAVAudioSessionObservers { + [[NSNotificationCenter defaultCenter] removeObserver:self]; +} + +@end diff --git a/AVPlayerExample/AudioDevices/ExampleAVPlayerProcessingTap.h b/AVPlayerExample/AudioDevices/ExampleAVPlayerProcessingTap.h new file mode 100755 index 00000000..4ee82f7a --- /dev/null +++ b/AVPlayerExample/AudioDevices/ExampleAVPlayerProcessingTap.h @@ -0,0 +1,47 @@ +// +// ExampleAVPlayerProcessingTap.h +// AVPlayerExample +// +// Copyright © 2018 Twilio Inc. All rights reserved. +// + +#import +#import + +@class ExampleAVPlayerAudioDevice; + +typedef struct ExampleAVPlayerAudioTapContext { + __weak ExampleAVPlayerAudioDevice *audioDevice; + BOOL audioTapPrepared; + + TPCircularBuffer *capturingBuffer; + AudioConverterRef captureFormatConverter; + BOOL capturingSampleRateConversion; + BOOL captureFormatConvertIsPrimed; + + TPCircularBuffer *renderingBuffer; + AudioConverterRef renderFormatConverter; + AudioStreamBasicDescription renderingFormat; + + // Cached source audio, in case we need to perform a sample rate conversion and can't consume all the samples in one go. + AudioBufferList *sourceCache; + UInt32 sourceCacheFrames; + AudioStreamBasicDescription sourceFormat; +} ExampleAVPlayerAudioTapContext; + +void AVPlayerProcessingTapInit(MTAudioProcessingTapRef tap, void *clientInfo, void **tapStorageOut); + +void AVPlayerProcessingTapFinalize(MTAudioProcessingTapRef tap); + +void AVPlayerProcessingTapPrepare(MTAudioProcessingTapRef tap, + CMItemCount maxFrames, + const AudioStreamBasicDescription *processingFormat); + +void AVPlayerProcessingTapUnprepare(MTAudioProcessingTapRef tap); + +void AVPlayerProcessingTapProcess(MTAudioProcessingTapRef tap, + CMItemCount numberFrames, + MTAudioProcessingTapFlags flags, + AudioBufferList *bufferListInOut, + CMItemCount *numberFramesOut, + MTAudioProcessingTapFlags *flagsOut); diff --git a/AVPlayerExample/AudioDevices/ExampleAVPlayerProcessingTap.m b/AVPlayerExample/AudioDevices/ExampleAVPlayerProcessingTap.m new file mode 100755 index 00000000..207b9d20 --- /dev/null +++ b/AVPlayerExample/AudioDevices/ExampleAVPlayerProcessingTap.m @@ -0,0 +1,395 @@ +// +// ExampleAVPlayerProcessingTap.m +// AVPlayerExample +// +// Copyright © 2018 Twilio Inc. All rights reserved. +// + +#import "ExampleAVPlayerProcessingTap.h" + +#import "ExampleAVPlayerAudioDevice.h" +#import "TPCircularBuffer+AudioBufferList.h" + +static size_t const kPreferredNumberOfChannels = 2; +static uint32_t const kPreferredSampleRate = 48000; + +typedef struct ExampleAVPlayerAudioConverterContext { + AudioBufferList *cacheBuffers; + UInt32 cachePackets; + AudioBufferList *sourceBuffers; + // Keep track if we are iterating through the source to provide data to a converter. + UInt32 sourcePackets; + UInt32 sourcePacketIndex; +} ExampleAVPlayerAudioConverterContext; + +AudioBufferList *AudioBufferListCreate(const AudioStreamBasicDescription *audioFormat, int frameCount) { + int numberOfBuffers = audioFormat->mFormatFlags & kAudioFormatFlagIsNonInterleaved ? audioFormat->mChannelsPerFrame : 1; + AudioBufferList *audio = malloc(sizeof(AudioBufferList) + (numberOfBuffers - 1) * sizeof(AudioBuffer)); + if (!audio) { + return NULL; + } + audio->mNumberBuffers = numberOfBuffers; + + int channelsPerBuffer = audioFormat->mFormatFlags & kAudioFormatFlagIsNonInterleaved ? 
1 : audioFormat->mChannelsPerFrame; + int bytesPerBuffer = audioFormat->mBytesPerFrame * frameCount; + for (int i = 0; i < numberOfBuffers; i++) { + if (bytesPerBuffer > 0) { + audio->mBuffers[i].mData = calloc(bytesPerBuffer, 1); + if (!audio->mBuffers[i].mData) { + for (int j = 0; j < i; j++ ) { + free(audio->mBuffers[j].mData); + } + free(audio); + return NULL; + } + } else { + audio->mBuffers[i].mData = NULL; + } + audio->mBuffers[i].mDataByteSize = bytesPerBuffer; + audio->mBuffers[i].mNumberChannels = channelsPerBuffer; + } + return audio; +} + +void AudioBufferListFree(AudioBufferList *bufferList ) { + for (int i=0; i<bufferList->mNumberBuffers; i++) { + if (bufferList->mBuffers[i].mData != NULL) { + free(bufferList->mBuffers[i].mData); + } + } + free(bufferList); +} + +OSStatus AVPlayerAudioTapConverterInputDataProc(AudioConverterRef inAudioConverter, + UInt32 *ioNumberDataPackets, + AudioBufferList *ioData, + AudioStreamPacketDescription * _Nullable *outDataPacketDescription, + void *inUserData) { + UInt32 bytesPerChannel = 4; + + // Give the converter what they asked for. They might not consume all of our source in one callback. + UInt32 minimumPackets = *ioNumberDataPackets; + ExampleAVPlayerAudioConverterContext *context = inUserData; + + assert(context->sourcePackets + context->cachePackets >= *ioNumberDataPackets); +// printf("Convert at least %d input packets. We have %d source packets, %d cached packets.\n", *ioNumberDataPackets, context->sourcePackets, context->cachePackets); + AudioBufferList *sourceBufferList = (AudioBufferList *)context->sourceBuffers; + AudioBufferList *cacheBufferList = (AudioBufferList *)context->cacheBuffers; + assert(sourceBufferList->mNumberBuffers == ioData->mNumberBuffers); + + for (UInt32 i = 0; i < sourceBufferList->mNumberBuffers; i++) { + if (context->cachePackets > 0) { + AudioBuffer *cacheBuffer = &cacheBufferList->mBuffers[i]; + AudioBuffer *outputBuffer = &ioData->mBuffers[i]; + UInt32 cachedBytes = context->cachePackets * bytesPerChannel; + UInt32 cachedFrames = context->cachePackets; + outputBuffer->mNumberChannels = cacheBuffer->mNumberChannels; + outputBuffer->mDataByteSize = cachedBytes; + outputBuffer->mData = cacheBuffer->mData; + *ioNumberDataPackets = cachedFrames; + } else { + UInt32 sourceFrames = minimumPackets; + UInt32 sourceBytes = sourceFrames * bytesPerChannel; + + AudioBuffer *sourceBuffer = &sourceBufferList->mBuffers[i]; + AudioBuffer *outputBuffer = &ioData->mBuffers[i]; + outputBuffer->mNumberChannels = sourceBuffer->mNumberChannels; + outputBuffer->mDataByteSize = sourceBytes; + outputBuffer->mData = sourceBuffer->mData + (context->sourcePacketIndex * bytesPerChannel * sourceBuffer->mNumberChannels); + } + } + + if (context->cachePackets > 0) { + context->cachePackets = 0; + } else { + context->sourcePacketIndex += *ioNumberDataPackets; + } + +// if (context->sourcePackets - minimumPackets > 0) { +// // Copy the remainder of the source which was not used into the front of our cache.
+// +// UInt32 packetsToCopy = context->sourcePackets - minimumPackets; +// for (UInt32 i = 0; i < sourceBufferList->mNumberBuffers; i++) { +// AudioBuffer *cacheBuffer = &cacheBufferList->mBuffers[i]; +// AudioBuffer *sourceBuffer = &sourceBufferList->mBuffers[i]; +// assert(cacheBuffer->mDataByteSize >= sourceBuffer->mDataByteSize); +// UInt32 bytesToCopy = packetsToCopy * bytesPerChannel; +// void *sourceData = sourceBuffer->mData + (minimumPackets * bytesPerChannel); +// memcpy(cacheBuffer->mData, sourceData, bytesToCopy); +// } +// context->cachePackets = packetsToCopy; +// } + + return noErr; +} + +static inline void AVPlayerAudioTapProduceFilledFrames(TPCircularBuffer *buffer, + AudioConverterRef converter, + BOOL isConverterPrimed, + AudioBufferList *bufferListIn, + AudioBufferList *sourceCache, + UInt32 *cachedSourceFrames, + UInt32 framesIn, + UInt32 channelsOut) { + // Start with input buffer size as our argument. + // TODO: Does non-interleaving count towards the size (*2)? + // Give us a little more priming than we need (~8 frames). + UInt32 primeFrames = 8; + UInt32 sourceFrames = framesIn; + if (!isConverterPrimed) { + framesIn -= primeFrames; + } else if (*cachedSourceFrames > 0) { + framesIn += *cachedSourceFrames; + } + UInt32 desiredIoBufferSize = framesIn * 4 * bufferListIn->mNumberBuffers; +// printf("Input is %d bytes (%d total frames, %d cached frames).\n", desiredIoBufferSize, framesIn, *cachedSourceFrames); + UInt32 propertySizeIo = sizeof(desiredIoBufferSize); + OSStatus status = AudioConverterGetProperty(converter, + kAudioConverterPropertyCalculateOutputBufferSize, + &propertySizeIo, &desiredIoBufferSize); + + UInt32 bytesPerFrameOut = channelsOut * sizeof(SInt16); + UInt32 framesOut = (desiredIoBufferSize) / bytesPerFrameOut; +// UInt32 framesOut = (desiredIoBufferSize + (bytesPerFrameOut - 1)) / bytesPerFrameOut; +// framesOut += framesOut % 2; + UInt32 bytesOut = framesOut * bytesPerFrameOut; +// printf("Converter wants an output of %d bytes (%d frames, %d bytes per frames).\n", +// desiredIoBufferSize, framesOut, bytesPerFrameOut); + + AudioBufferList *producerBufferList = TPCircularBufferPrepareEmptyAudioBufferList(buffer, 1, bytesOut, NULL); + if (producerBufferList == NULL) { + return; + } + producerBufferList->mBuffers[0].mNumberChannels = channelsOut; + + UInt32 ioPacketSize = framesOut; +// printf("Ready to fill output buffer of frames: %d, bytes: %d with input buffer of frames: %d, bytes: %d.\n", +// framesOut, bytesOut, framesIn, framesIn * 4 * bufferListIn->mNumberBuffers); + ExampleAVPlayerAudioConverterContext context; + context.sourceBuffers = bufferListIn; + context.cacheBuffers = sourceCache; + context.sourcePackets = sourceFrames; + context.sourcePacketIndex = 0; + context.cachePackets = *cachedSourceFrames; + status = AudioConverterFillComplexBuffer(converter, + AVPlayerAudioTapConverterInputDataProc, + &context, + &ioPacketSize, + producerBufferList, + NULL); + // Adjust for what the format converter actually produced, in case it was different than what we asked for. + producerBufferList->mBuffers[0].mDataByteSize = ioPacketSize * bytesPerFrameOut; +// printf("Output was: %d packets / %d bytes. Consumed input packets: %d. Cached input packets: %d.\n", +// ioPacketSize, ioPacketSize * bytesPerFrameOut, context.sourcePackets, context.cachePackets); + + // TODO: Do we still produce the buffer list after a failure? 
+ if (status == kCVReturnSuccess) { + *cachedSourceFrames = context.cachePackets; + TPCircularBufferProduceAudioBufferList(buffer, NULL); + } else { + printf("Error converting buffers: %d\n", status); + } +} + +static inline void AVPlayerAudioTapProduceConvertedFrames(TPCircularBuffer *buffer, + AudioConverterRef converter, + AudioBufferList *bufferListIn, + UInt32 framesIn, + CMTimeRange *sourceRangeIn, + UInt32 channelsOut) { + UInt32 bytesOut = framesIn * channelsOut * 2; + AudioBufferList *producerBufferList = TPCircularBufferPrepareEmptyAudioBufferList(buffer, 1, bytesOut, NULL); + if (producerBufferList == NULL) { + return; + } + producerBufferList->mBuffers[0].mNumberChannels = channelsOut; + + OSStatus status = AudioConverterConvertComplexBuffer(converter, + framesIn, + bufferListIn, + producerBufferList); + + // TODO: Do we still produce the buffer list after a failure? + if (status == kCVReturnSuccess) { + AudioTimeStamp timestamp = {0}; + timestamp.mFlags = kAudioTimeStampSampleTimeValid; + timestamp.mSampleTime = sourceRangeIn->start.value; + TPCircularBufferProduceAudioBufferList(buffer, &timestamp); + } else { + printf("Error converting buffers: %d\n", status); + } +} + +#pragma mark - MTAudioProcessingTap + +void AVPlayerProcessingTapInit(MTAudioProcessingTapRef tap, void *clientInfo, void **tapStorageOut) { + NSLog(@"Init audio tap."); + + // Provide access to our device in the Callbacks. + *tapStorageOut = clientInfo; +} + +void AVPlayerProcessingTapFinalize(MTAudioProcessingTapRef tap) { + NSLog(@"Finalize audio tap."); + + ExampleAVPlayerAudioTapContext *context = (ExampleAVPlayerAudioTapContext *)MTAudioProcessingTapGetStorage(tap); + context->audioTapPrepared = NO; + TPCircularBuffer *capturingBuffer = context->capturingBuffer; + TPCircularBuffer *renderingBuffer = context->renderingBuffer; + TPCircularBufferCleanup(capturingBuffer); + TPCircularBufferCleanup(renderingBuffer); +} + +void AVPlayerProcessingTapPrepare(MTAudioProcessingTapRef tap, + CMItemCount maxFrames, + const AudioStreamBasicDescription *processingFormat) { + NSLog(@"Preparing with frames: %d, channels: %d, bits/channel: %d, sample rate: %0.1f", + (int)maxFrames, processingFormat->mChannelsPerFrame, processingFormat->mBitsPerChannel, processingFormat->mSampleRate); + assert(processingFormat->mFormatID == kAudioFormatLinearPCM); + + // Defer init of the ring buffer memory until we understand the processing format. + ExampleAVPlayerAudioTapContext *context = (ExampleAVPlayerAudioTapContext *)MTAudioProcessingTapGetStorage(tap); + TPCircularBuffer *capturingBuffer = context->capturingBuffer; + TPCircularBuffer *renderingBuffer = context->renderingBuffer; + + size_t bufferSize = processingFormat->mBytesPerFrame * maxFrames; + // We need to add some overhead for the AudioBufferList data structures. + bufferSize += 2048; + // TODO: Size the buffer appropriately, as we may need to accumulate more than maxFrames due to bursty processing. + bufferSize *= 20; + + // TODO: If we are re-allocating then check the size?
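// Sketch for the TODO above (not in the original sample): if Prepare can run again after an earlier
// Prepare, the ring buffers may already hold allocations. One option is to release them first:
//   if (context->audioTapPrepared) {
//       TPCircularBufferCleanup(capturingBuffer);
//       TPCircularBufferCleanup(renderingBuffer);
//   }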
+ TPCircularBufferInit(capturingBuffer, bufferSize); + TPCircularBufferInit(renderingBuffer, bufferSize); + + AudioBufferList *cacheBufferList = AudioBufferListCreate(processingFormat, (int)maxFrames); + context->sourceCache = cacheBufferList; + context->sourceCacheFrames = 0; + context->sourceFormat = *processingFormat; + + TVIAudioFormat *playbackFormat = [[TVIAudioFormat alloc] initWithChannels:processingFormat->mChannelsPerFrame + sampleRate:processingFormat->mSampleRate + framesPerBuffer:maxFrames]; + AudioStreamBasicDescription preferredPlaybackDescription = [playbackFormat streamDescription]; + BOOL requiresFormatConversion = preferredPlaybackDescription.mFormatFlags != processingFormat->mFormatFlags; + + context->renderingFormat = preferredPlaybackDescription; + + if (requiresFormatConversion) { + OSStatus status = AudioConverterNew(processingFormat, &preferredPlaybackDescription, &context->renderFormatConverter); + if (status != 0) { + NSLog(@"Failed to create AudioConverter: %d", (int)status); + return; + } + } + + TVIAudioFormat *recordingFormat = [[TVIAudioFormat alloc] initWithChannels:kPreferredNumberOfChannels + sampleRate:(Float64)kPreferredSampleRate + framesPerBuffer:maxFrames]; + AudioStreamBasicDescription preferredRecordingDescription = [recordingFormat streamDescription]; + BOOL requiresSampleRateConversion = processingFormat->mSampleRate != preferredRecordingDescription.mSampleRate; + context->capturingSampleRateConversion = requiresSampleRateConversion; + + if (requiresFormatConversion || requiresSampleRateConversion) { + OSStatus status = AudioConverterNew(processingFormat, &preferredRecordingDescription, &context->captureFormatConverter); + if (status != 0) { + NSLog(@"Failed to create AudioConverter: %d", (int)status); + return; + } + UInt32 primingMethod = kConverterPrimeMethod_Normal; + status = AudioConverterSetProperty(context->captureFormatConverter, kAudioConverterPrimeMethod, + sizeof(UInt32), &primingMethod); + } + + context->audioTapPrepared = YES; + [context->audioDevice audioTapDidPrepare]; +} + +void AVPlayerProcessingTapUnprepare(MTAudioProcessingTapRef tap) { + NSLog(@"Unpreparing audio tap."); + + // Prevent any more frames from being consumed. Note that this might end audio playback early. 
+ ExampleAVPlayerAudioTapContext *context = (ExampleAVPlayerAudioTapContext *)MTAudioProcessingTapGetStorage(tap); + TPCircularBuffer *capturingBuffer = context->capturingBuffer; + TPCircularBuffer *renderingBuffer = context->renderingBuffer; + + TPCircularBufferClear(capturingBuffer); + TPCircularBufferClear(renderingBuffer); + if (context->sourceCache) { + AudioBufferListFree(context->sourceCache); + context->sourceCache = NULL; + context->sourceCacheFrames = 0; + } + + if (context->renderFormatConverter != NULL) { + AudioConverterDispose(context->renderFormatConverter); + context->renderFormatConverter = NULL; + } + + if (context->captureFormatConverter != NULL) { + AudioConverterDispose(context->captureFormatConverter); + context->captureFormatConverter = NULL; + context->captureFormatConvertIsPrimed = NO; + } +} + +void AVPlayerProcessingTapProcess(MTAudioProcessingTapRef tap, + CMItemCount numberFrames, + MTAudioProcessingTapFlags flags, + AudioBufferList *bufferListInOut, + CMItemCount *numberFramesOut, + MTAudioProcessingTapFlags *flagsOut) { + ExampleAVPlayerAudioTapContext *context = (ExampleAVPlayerAudioTapContext *)MTAudioProcessingTapGetStorage(tap); + CMTimeRange sourceRange; + OSStatus status = MTAudioProcessingTapGetSourceAudio(tap, + numberFrames, + bufferListInOut, + flagsOut, + &sourceRange, + numberFramesOut); + if (status != noErr) { + // TODO: It might be useful to fill zeros here. + return; + } else if(CMTIMERANGE_IS_EMPTY(sourceRange) || + CMTIMERANGE_IS_INVALID(sourceRange)) { + return; + } + + UInt32 framesToCopy = (UInt32)*numberFramesOut; + + // Produce renderer buffers. These are interleaved, signed integer frames in the source's sample rate. + TPCircularBuffer *renderingBuffer = context->renderingBuffer; + AVPlayerAudioTapProduceConvertedFrames(renderingBuffer, + context->renderFormatConverter, + bufferListInOut, + framesToCopy, + &sourceRange, + kPreferredNumberOfChannels); + + // Produce capturer buffers. We will perform a sample rate conversion if needed. + TPCircularBuffer *capturingBuffer = context->capturingBuffer; + if (context->capturingSampleRateConversion) { + AVPlayerAudioTapProduceFilledFrames(capturingBuffer, + context->captureFormatConverter, + context->captureFormatConvertIsPrimed, + bufferListInOut, context->sourceCache, + &context->sourceCacheFrames, + framesToCopy, + kPreferredNumberOfChannels); + context->captureFormatConvertIsPrimed = YES; + } else { + AVPlayerAudioTapProduceConvertedFrames(capturingBuffer, + context->captureFormatConverter, + bufferListInOut, + framesToCopy, + &sourceRange, + kPreferredNumberOfChannels); + } + + // Flush converters on a discontinuity. This is especially important for priming a sample rate converter. 
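// Note (explanatory, not original code): a sample rate converter keeps a small amount of input
// history ("priming" frames) so it can interpolate across buffer boundaries. After an end-of-stream
// or other discontinuity that history describes unrelated audio, so both converters are reset below
// and the capture converter is re-primed on the next buffer it processes.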
+ if (*flagsOut & kMTAudioProcessingTapFlag_EndOfStream) { + AudioConverterReset(context->renderFormatConverter); + AudioConverterReset(context->captureFormatConverter); + context->captureFormatConvertIsPrimed = NO; + } +} diff --git a/AVPlayerExample/Base.lproj/LaunchScreen.storyboard b/AVPlayerExample/Base.lproj/LaunchScreen.storyboard old mode 100644 new mode 100755 index fdf3f97d..bfa36129 --- a/AVPlayerExample/Base.lproj/LaunchScreen.storyboard +++ b/AVPlayerExample/Base.lproj/LaunchScreen.storyboard @@ -1,7 +1,8 @@ - + - + + @@ -9,14 +10,11 @@ - - - - + diff --git a/AVPlayerExample/Base.lproj/Main.storyboard b/AVPlayerExample/Base.lproj/Main.storyboard old mode 100644 new mode 100755 index f08bc4df..22ca14f4 --- a/AVPlayerExample/Base.lproj/Main.storyboard +++ b/AVPlayerExample/Base.lproj/Main.storyboard @@ -1,149 +1,133 @@ - + - - + + - + - - - - - - + + - - - - + + + + - + + - - - - - - + + + + + - - + + + + + + + - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + - - - - - - - - + + + + + + + + - + - + diff --git a/AVPlayerExample/ExampleAVPlayerAudioTap.swift b/AVPlayerExample/ExampleAVPlayerAudioTap.swift new file mode 100755 index 00000000..f27c687e --- /dev/null +++ b/AVPlayerExample/ExampleAVPlayerAudioTap.swift @@ -0,0 +1,64 @@ +// +// ExampleAVPlayerAudioTap.swift +// AVPlayerExample +// +// Copyright © 2018 Twilio Inc. All rights reserved. +// + +import Foundation +import MediaToolbox + +class ExampleAVPlayerAudioTap { + + static func mediaToolboxAudioProcessingTapCreate(audioTap: ExampleAVPlayerAudioTap) -> MTAudioProcessingTap? { + var callbacks = MTAudioProcessingTapCallbacks( + version: kMTAudioProcessingTapCallbacksVersion_0, + clientInfo: UnsafeMutableRawPointer(Unmanaged.passUnretained(audioTap).toOpaque()), + init: audioTap.tapInit, + finalize: audioTap.tapFinalize, + prepare: audioTap.tapPrepare, + unprepare: audioTap.tapUnprepare, + process: audioTap.tapProcess + ) + + var tap: Unmanaged? 
+ let status = MTAudioProcessingTapCreate(kCFAllocatorDefault, + &callbacks, + kMTAudioProcessingTapCreationFlag_PostEffects, + &tap) + + if status == kCVReturnSuccess { + return tap!.takeUnretainedValue() + } else { + return nil + } + } + + let tapInit: MTAudioProcessingTapInitCallback = { (tap, clientInfo, tapStorageOut) in + let nonOptionalSelf = clientInfo!.assumingMemoryBound(to: ExampleAVPlayerAudioTap.self).pointee + print("init:", tap, clientInfo as Any, tapStorageOut, nonOptionalSelf) + } + + let tapFinalize: MTAudioProcessingTapFinalizeCallback = { + (tap) in + print(#function) + } + + let tapPrepare: MTAudioProcessingTapPrepareCallback = {(tap, b, c) in + print("Prepare:", tap, b, c) + } + + let tapUnprepare: MTAudioProcessingTapUnprepareCallback = {(tap) in + print("Unprepare:", tap) + } + + let tapProcess: MTAudioProcessingTapProcessCallback = { + (tap, numberFrames, flags, bufferListInOut, numberFramesOut, flagsOut) in + print("Process callback:", tap, numberFrames, flags, bufferListInOut, numberFramesOut, flagsOut) + + let status = MTAudioProcessingTapGetSourceAudio(tap, numberFrames, bufferListInOut, flagsOut, nil, numberFramesOut) + if status != kCVReturnSuccess { + print("Failed to get source audio: ", status) + } + } +} diff --git a/AVPlayerExample/ExampleAVPlayerSource.swift b/AVPlayerExample/ExampleAVPlayerSource.swift new file mode 100755 index 00000000..a4a14450 --- /dev/null +++ b/AVPlayerExample/ExampleAVPlayerSource.swift @@ -0,0 +1,254 @@ +// +// ExampleAVPlayerSource.swift +// AVPlayerExample +// +// Copyright © 2018 Twilio Inc. All rights reserved. +// + +import AVFoundation +import TwilioVideo + +/* + * This capturer manages an AVPlayerVideoItemOutput, attempting to output each frame that becomes available + * for presentation. By default, a CADisplayLink timer is used to sample at the natural cadence of the display. + * When there is no more content to sample, the capturer suspends its timer and waits for callbacks via + * AVPlayerItemOutputPullDelegate to resume. In some cases, downscaling is used to reduce CPU and memory consumption. + * + * Please be aware that AVPlayer and its playback pipeline prepare content for presentation on your device, including + * mapping frames to the display. For example, when playing 23.976 or 24 fps content a technique known as 3:2 pulldown + * is used to time video samples for a 60 Hz iPhone display. Our capturer tags the frames with the best timing infromation + * that it has available - the presentation timestamps provided by AVPlayerVideoItemOutput. + */ +class ExampleAVPlayerSource: NSObject, TVIVideoCapturer { + + private var captureConsumer: TVIVideoCaptureConsumer? = nil + // Track how often we are receiving content. If no new frames are coming there is no need to sample the output. + private var lastPresentationTimestamp: CMTime? + // Display timer which fires at the natural cadence of our display. Sampling typically occurs within these timer callbacks. + private var outputTimer: CADisplayLink? = nil + // Dispatch timer which fires at a pre-determined cadence `kFrameOutputInterval`. + private var timerSource: DispatchSourceTimer? = nil + var videoOutput: AVPlayerItemVideoOutput? = nil + private let videoSampleQueue: DispatchQueue + + // Frame output/sampling interval for a DispatchSource. 
Note: 60 Hz = 16667, 23.976 Hz = 41708 + static let kFrameOutputInterval = DispatchTimeInterval.microseconds(16667) + static let kFrameOutputLeeway = DispatchTimeInterval.milliseconds(0) + // How much time we will wait without receiving any frames before suspending output/sampling. + static let kFrameOutputSuspendTimeout = Double(1.0) + // The largest dimension we will output for streaming using the Video SDK. + static let kFrameOutputMaxDimension = CGFloat(960.0) + // A bounding box which represents the largest video we will output for streaming. + static let kFrameOutputMaxRect = CGRect(x: 0, y: 0, width: kFrameOutputMaxDimension, height: kFrameOutputMaxDimension) + + // Use a CADisplayLink, or a DispatchSourceTimer (experimental) for sampling. + static private var useDisplayLinkTimer = true + + init(item: AVPlayerItem) { + videoSampleQueue = DispatchQueue(label: "com.twilio.avplayersource", qos: DispatchQoS.userInteractive, + attributes: DispatchQueue.Attributes(rawValue: 0), + autoreleaseFrequency: DispatchQueue.AutoreleaseFrequency.workItem, + target: nil) + super.init() + + let presentationSize = item.presentationSize + let presentationPixels = presentationSize.width * presentationSize.height + print("Prepare for player item with size:", presentationSize, " pixels:", presentationPixels); + + /* + * We might request buffers downscaled for streaming. The output will always be 8-bit 4:2:0 NV12. + */ + let attributes: [String : Any] + + if (presentationSize.width > ExampleAVPlayerSource.kFrameOutputMaxDimension || + presentationSize.height > ExampleAVPlayerSource.kFrameOutputMaxDimension) { + let streamingRect = AVMakeRect(aspectRatio: presentationSize, insideRect: ExampleAVPlayerSource.kFrameOutputMaxRect) + print("Requesting downscaling to:", streamingRect.size, "."); + + attributes = [ + kCVPixelBufferWidthKey as String : Int(streamingRect.width), + kCVPixelBufferHeightKey as String : Int(streamingRect.height), + kCVPixelBufferIOSurfacePropertiesKey as String : [ : ], + kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange + ] as [String : Any] + } else { + attributes = [ + kCVPixelBufferIOSurfacePropertiesKey as String : [ : ], + kCVPixelBufferPixelFormatTypeKey as String : kCVPixelFormatType_420YpCbCr8BiPlanarFullRange + ] as [String : Any] + } + + videoOutput = AVPlayerItemVideoOutput(pixelBufferAttributes: attributes) + videoOutput?.setDelegate(self, queue: videoSampleQueue) + + if ExampleAVPlayerSource.useDisplayLinkTimer { + addDisplayTimer() + } + videoOutput?.requestNotificationOfMediaDataChange(withAdvanceInterval: 0.02) + + item.add(videoOutput!) + } + + func outputFrame(itemTimestamp: CMTime) { + guard let output = videoOutput else { + return + } + guard let consumer = captureConsumer else { + return + } + if !output.hasNewPixelBuffer(forItemTime: itemTimestamp) { + // TODO: Consider suspending the timer and requesting a notification when media becomes available. +// print("No frame for host timestamp:", CACurrentMediaTime(), "\n", +// "Last presentation timestamp was:", lastPresentationTimestamp != nil ? lastPresentationTimestamp! : CMTime.zero) + return + } + + var presentationTimestamp = CMTime.zero + let pixelBuffer = output.copyPixelBuffer(forItemTime: itemTimestamp, + itemTimeForDisplay: &presentationTimestamp) + if let buffer = pixelBuffer { + if let lastTime = lastPresentationTimestamp { + // TODO: Use this info for 3:2 pulldown to re-construct the proper timestamps without display cadence? 
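                // Background for the TODO above (illustrative, not original code): 23.976/24 fps content
                // mapped onto a 60 Hz display is repeated in a 3, 2, 3, 2 pattern of v-sync intervals, so
                // successive presentation deltas alternate between ~50 ms and ~33 ms. Counting v-syncs per
                // frame would expose that cadence:
                //   let delta = CMTimeSubtract(presentationTimestamp, lastTime)
                //   let vsyncs = Int((delta.seconds * 60.0).rounded())   // alternates 3, 2, 3, 2 for pulldown content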
+// let delta = presentationTimestamp - lastTime +// print("Frame delta was:", delta) +// let movieTime = CVBufferGetAttachment(buffer, kCVBufferMovieTimeKey, nil) +// print("Movie time was:", movieTime as Any) + } + lastPresentationTimestamp = presentationTimestamp + + guard let frame = TVIVideoFrame(timestamp: presentationTimestamp, + buffer: buffer, + orientation: TVIVideoOrientation.up) else { + assertionFailure("We couldn't create a TVIVideoFrame with a valid CVPixelBuffer.") + return + } + consumer.consumeCapturedFrame(frame) + } + + if ExampleAVPlayerSource.useDisplayLinkTimer { + outputTimer?.isPaused = false + } else if timerSource == nil { + startTimerSource(hostTime: CACurrentMediaTime()) + } + } + + func startTimerSource(hostTime: CFTimeInterval) { + print(#function) + + let source = DispatchSource.makeTimerSource(flags: DispatchSource.TimerFlags.strict, + queue: videoSampleQueue) + timerSource = source + + source.setEventHandler(handler: { + if let output = self.videoOutput { + let currentHostTime = CACurrentMediaTime() + let currentItemTime = output.itemTime(forHostTime: currentHostTime) + self.outputFrame(itemTimestamp: currentItemTime) + } + }) + + // Thread safe cleanup of temporary storage, in case of cancellation. + source.setCancelHandler(handler: { + }) + + // Schedule a first time source for the full interval. + let deadline = DispatchTime.now() + ExampleAVPlayerSource.kFrameOutputInterval + source.schedule(deadline: deadline, + repeating: ExampleAVPlayerSource.kFrameOutputInterval, + leeway: ExampleAVPlayerSource.kFrameOutputLeeway) + source.resume() + } + + func addDisplayTimer() { + let timer = CADisplayLink(target: self, + selector: #selector(ExampleAVPlayerSource.displayLinkDidFire(displayLink:))) + // Fire at the native v-sync cadence of our display. This is what AVPlayer is targeting anyways. + timer.preferredFramesPerSecond = 0 + timer.isPaused = true + timer.add(to: RunLoop.current, forMode: RunLoop.Mode.common) + outputTimer = timer + } + + @objc func displayLinkDidFire(displayLink: CADisplayLink) { + if let output = self.videoOutput { + // We want the video content targeted for the next v-sync. + let targetHostTime = displayLink.targetTimestamp + let currentItemTime = output.itemTime(forHostTime: targetHostTime) + self.outputFrame(itemTimestamp: currentItemTime) + } + } + + @objc func stopTimerSource() { + print(#function) + + timerSource?.cancel() + timerSource = nil + } + + func stopDisplayTimer() { + outputTimer?.invalidate() + outputTimer = nil + } + + public var isScreencast: Bool { + get { + return false + } + } + + public var supportedFormats: [TVIVideoFormat] { + get { + let format = TVIVideoFormat() + format.dimensions = CMVideoDimensions(width: 640, height: 360) + format.frameRate = 30 + format.pixelFormat = TVIPixelFormat.formatYUV420BiPlanarFullRange + return [format] + } + } + + func startCapture(_ format: TVIVideoFormat, consumer: TVIVideoCaptureConsumer) { + print(#function) + + self.captureConsumer = consumer; + consumer.captureDidStart(true) + } + + func stopCapture() { + print(#function) + + if ExampleAVPlayerSource.useDisplayLinkTimer { + stopDisplayTimer() + } else { + stopTimerSource() + } + self.captureConsumer = nil + } +} + +extension ExampleAVPlayerSource: AVPlayerItemOutputPullDelegate { + + func outputMediaDataWillChange(_ sender: AVPlayerItemOutput) { + print(#function) + + // Begin to receive video frames. + let videoOutput = sender as! 
AVPlayerItemVideoOutput + let currentHostTime = CACurrentMediaTime() + let currentItemTime = videoOutput.itemTime(forHostTime: currentHostTime) + + // We might have been called back so late that the output already has a frame ready. + let hasFrame = videoOutput.hasNewPixelBuffer(forItemTime: currentItemTime) + if hasFrame { + outputFrame(itemTimestamp: currentItemTime) + } else if ExampleAVPlayerSource.useDisplayLinkTimer { + outputTimer?.isPaused = false + } else { + startTimerSource(hostTime: currentHostTime); + } + } + + func outputSequenceWasFlushed(_ output: AVPlayerItemOutput) { + print(#function) + + // TODO: Flush and output a black frame while we wait? + } +} diff --git a/AVPlayerExample/ExampleAVPlayerView.swift b/AVPlayerExample/ExampleAVPlayerView.swift new file mode 100755 index 00000000..04cd4f24 --- /dev/null +++ b/AVPlayerExample/ExampleAVPlayerView.swift @@ -0,0 +1,54 @@ +// +// ExampleAVPlayerView.swift +// AVPlayerExample +// +// Copyright © 2018 Twilio Inc. All rights reserved. +// + +import AVFoundation +import UIKit + +class ExampleAVPlayerView: UIView { + + init(frame: CGRect, player: AVPlayer) { + super.init(frame: frame) + self.playerLayer.player = player + self.contentMode = .scaleAspectFit + } + + required init?(coder aDecoder: NSCoder) { + super.init(coder: aDecoder) + // It won't be possible to hookup an AVPlayer yet. + self.contentMode = .scaleAspectFit + } + + var playerLayer : AVPlayerLayer { + get { + return self.layer as! AVPlayerLayer + } + } + + override var contentMode: UIView.ContentMode { + set { + switch newValue { + case .scaleAspectFill: + playerLayer.videoGravity = .resizeAspectFill + case .scaleAspectFit: + playerLayer.videoGravity = .resizeAspect + case .scaleToFill: + playerLayer.videoGravity = .resize + default: + playerLayer.videoGravity = .resizeAspect + } + super.contentMode = newValue + } + + get { + return super.contentMode + } + } + + override class var layerClass : AnyClass { + return AVPlayerLayer.self + } +} diff --git a/AVPlayerExample/Info.plist b/AVPlayerExample/Info.plist old mode 100644 new mode 100755 index 333ce04b..9e96ba8f --- a/AVPlayerExample/Info.plist +++ b/AVPlayerExample/Info.plist @@ -2,13 +2,35 @@ - NSAppTransportSecurity - - NSAllowsArbitraryLoads - - CFBundleDevelopmentRegion - en + $(DEVELOPMENT_LANGUAGE) + CFBundleDocumentTypes + + + CFBundleTypeIconFiles + + CFBundleTypeName + mpeg4 + LSHandlerRank + Default + LSItemContentTypes + + public.mpeg-4 + + + + CFBundleTypeIconFiles + + CFBundleTypeName + quicktime + LSHandlerRank + Default + LSItemContentTypes + + com.apple.quicktime-movie + + + CFBundleExecutable $(EXECUTABLE_NAME) CFBundleIdentifier @@ -25,13 +47,19 @@ 1 LSRequiresIPhoneOS + NSAppTransportSecurity + + NSAllowsArbitraryLoadsForMedia + + NSCameraUsageDescription - ${PRODUCT_NAME} uses your camera to capture video which is shared with other Room Participants. + ${PRODUCT_NAME} uses your camera to capture video which is shared with other Viewers. NSMicrophoneUsageDescription - ${PRODUCT_NAME} uses your microphone to capture audio which is shared with other Room Participants. + ${PRODUCT_NAME} shares your microphone with other Viewers. Tap to mute your audio at any time. 
UIBackgroundModes audio + voip UILaunchStoryboardName LaunchScreen @@ -41,6 +69,8 @@ armv7 + UIRequiresPersistentWiFi + UISupportedInterfaceOrientations UIInterfaceOrientationPortrait @@ -54,5 +84,7 @@ UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight + LSSupportsOpeningDocumentsInPlace + diff --git a/AVPlayerExample/README.md b/AVPlayerExample/README.md deleted file mode 100644 index b463b863..00000000 --- a/AVPlayerExample/README.md +++ /dev/null @@ -1,17 +0,0 @@ -# AVPlayer example for Objective-C - -This example demonstrates how to use `AVPlayer` to stream Audio & Video content while connected to a `TVIRoom`. - -### Setup - -See the master [README](https://github.com/twilio/video-quickstart-ios/blob/master/README.md) for instructions on how to generate access tokens and connect to a Room. - -## Usage - -This example is very similar to the basic Quickstart. However, if you join a Room with no other Participants the app will stream media using `AVPlayer` while you wait. Once the first Participant joins the media content is paused and the remote video is shown in its place. - -In order to use `AVPlayer` along with Twilio Video the `TVIAudioController+CallKit` APIs are used. Unlike normal CallKit operation, the application manually activates and deactivates `AVAudioSession` as needed. - -## Known Issues - -We are currently experiencing some problems with low output volume when `AVPlayer` content is mixed with remote Participant audio. This occurs when using the built-in device loudspeaker and microphone, but not when using headphones to monitor audio. For more information please refer to issue [#402](https://github.com/twilio/video-quickstart-ios/issues/402). diff --git a/AVPlayerExample/Utils.h b/AVPlayerExample/Utils.h deleted file mode 100644 index 68b7152e..00000000 --- a/AVPlayerExample/Utils.h +++ /dev/null @@ -1,21 +0,0 @@ -// -// Utils.h -// AVPlayerExample -// -// Copyright © 2016-2017 Twilio, Inc. All rights reserved. -// - -#import - -@interface PlatformUtils : NSObject - -+ (BOOL)isSimulator; - -@end - -@interface TokenUtils : NSObject - -+ (void)retrieveAccessTokenFromURL:(NSString *)tokenURLStr - completion:(void (^)(NSString* token, NSError *err)) completionHandler; - -@end diff --git a/AVPlayerExample/Utils.m b/AVPlayerExample/Utils.m deleted file mode 100644 index 927cca6d..00000000 --- a/AVPlayerExample/Utils.m +++ /dev/null @@ -1,42 +0,0 @@ -// -// Utils.m -// AVPlayerExample -// -// Copyright © 2016-2017 Twilio, Inc. All rights reserved. 
-// - -#import "Utils.h" - -@implementation PlatformUtils - -+ (BOOL)isSimulator { -#if TARGET_IPHONE_SIMULATOR - return YES; -#endif - return NO; -} - -@end - -@implementation TokenUtils - -+ (void)retrieveAccessTokenFromURL:(NSString *)tokenURLStr - completion:(void (^)(NSString* token, NSError *err)) completionHandler { - NSURL *tokenURL = [NSURL URLWithString:tokenURLStr]; - NSURLSessionConfiguration *sessionConfig = [NSURLSessionConfiguration defaultSessionConfiguration]; - NSURLSession *session = [NSURLSession sessionWithConfiguration:sessionConfig]; - NSURLSessionDataTask *task = [session dataTaskWithURL:tokenURL - completionHandler: ^(NSData * _Nullable data, - NSURLResponse * _Nullable response, - NSError * _Nullable error) { - NSString *accessToken = nil; - if (!error && data) { - accessToken = [[NSString alloc] initWithData:data - encoding:NSUTF8StringEncoding]; - } - completionHandler(accessToken, error); - }]; - [task resume]; -} - -@end diff --git a/AVPlayerExample/ViewController.h b/AVPlayerExample/ViewController.h deleted file mode 100644 index 35235ddb..00000000 --- a/AVPlayerExample/ViewController.h +++ /dev/null @@ -1,14 +0,0 @@ -// -// ViewController.h -// AVPlayerExample -// -// Copyright © 2016-2017 Twilio, Inc. All rights reserved. -// - -#import - -@interface ViewController : UIViewController - - -@end - diff --git a/AVPlayerExample/ViewController.m b/AVPlayerExample/ViewController.m deleted file mode 100644 index 88fbea51..00000000 --- a/AVPlayerExample/ViewController.m +++ /dev/null @@ -1,592 +0,0 @@ -// -// ViewController.m -// AVPlayerExample -// -// Copyright © 2016-2017 Twilio, Inc. All rights reserved. -// - -#import "ViewController.h" - -@import AVFoundation; -@import TwilioVideo; - -#import "AVPlayerView.h" -#import "Utils.h" - -typedef NS_ENUM(NSUInteger, ViewControllerState) { - /** - * The initial lobby UI is shown. - */ - ViewControllerStateLobby = 0, - /** - * The AVPlayer UI is shown. - */ - ViewControllerStateMediaPlayer, - /** - * The in Room UI is shown. - */ - ViewControllerStateRoom -}; - -NSString *const kVideoMovURL = @"https://s3-us-west-1.amazonaws.com/avplayervideo/What+Is+Cloud+Communications.mov"; -NSString *const kStatusKey = @"status"; - -@interface ViewController () - -// Configure access token manually for testing in `viewDidLoad`, if desired! Create one manually in the console. 
-@property (nonatomic, strong) NSString *accessToken; -@property (nonatomic, strong) NSString *tokenUrl; - -#pragma mark Video SDK components - -@property (nonatomic, strong) TVIRoom *room; -@property (nonatomic, strong) TVIDefaultAudioDevice *audioDevice; -@property (nonatomic, strong) TVICameraSource *camera; -@property (nonatomic, strong) TVILocalVideoTrack *localVideoTrack; -@property (nonatomic, strong) TVILocalAudioTrack *localAudioTrack; -@property (nonatomic, strong) TVIRemoteParticipant *remoteParticipant; -@property (nonatomic, weak) TVIVideoView *remoteView; - -#pragma mark AVPlayer - -@property (nonatomic, strong) AVPlayer *videoPlayer; -@property (nonatomic, weak) AVPlayerView *videoPlayerView; - -#pragma mark UI Element Outlets and handles - -// `TVIVideoView` created from a storyboard -@property (nonatomic, weak) IBOutlet TVIVideoView *previewView; - -@property (nonatomic, weak) IBOutlet UIView *connectButton; -@property (nonatomic, weak) IBOutlet UIButton *disconnectButton; -@property (nonatomic, weak) IBOutlet UILabel *messageLabel; -@property (nonatomic, weak) IBOutlet UITextField *roomTextField; -@property (nonatomic, weak) IBOutlet UIButton *micButton; -@property (nonatomic, weak) IBOutlet UILabel *roomLabel; -@property (nonatomic, weak) IBOutlet UILabel *roomLine; - -@end - -@implementation ViewController - -- (void)dealloc { - // We are done with AVAudioSession - [self stopAudioDevice]; -} - -#pragma mark - UIViewController - -- (void)viewDidLoad { - [super viewDidLoad]; - - [self logMessage:[NSString stringWithFormat:@"TwilioVideo v%@", [TwilioVideo version]]]; - - // Configure access token for testing. Create one manually in the console - // at https://www.twilio.com/console/video/runtime/testing-tools - self.accessToken = @"TWILIO_ACCESS_TOKEN"; - - // Using a token server to provide access tokens? Make sure the tokenURL is pointing to the correct location. - self.tokenUrl = @"http://localhost:8000/token.php"; - - // Start with the Lobby UI - [self showInterfaceState:ViewControllerStateLobby]; - - self.roomTextField.autocapitalizationType = UITextAutocapitalizationTypeNone; - self.roomTextField.delegate = self; - - UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(dismissKeyboard)]; - [self.view addGestureRecognizer:tap]; - - /** - * We will create an audio device and manage it's lifecycle in response to the AVPlayer events. Please note that the - * SDK does not support the use of multiple audio devices at the same time. If you've already connected to a Room, - * then all future connection attempts must use the same TVIDefaultAudioDevice as the first Room. Once all the existing - * Rooms are disconnected you are free to choose a new audio device for your next connection attempt. - */ - self.audioDevice = [TVIDefaultAudioDevice audioDevice]; - - // Prepare local media which we will share with Room Participants. 
- [self prepareMedia]; -} - -- (void)viewWillLayoutSubviews { - [super viewWillLayoutSubviews]; - - self.videoPlayerView.frame = CGRectMake(0, 0, CGRectGetWidth(self.view.bounds), CGRectGetHeight(self.view.bounds)); - self.remoteView.frame = CGRectMake(0, 0, CGRectGetWidth(self.view.bounds), CGRectGetHeight(self.view.bounds)); -} - -#pragma mark - NSObject - -- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context { - NSLog(@"Player changed: %@ status: %@", object, change); -} - -#pragma mark - Public - -- (IBAction)connectButtonPressed:(id)sender { - [self showInterfaceState:ViewControllerStateMediaPlayer]; - [self dismissKeyboard]; - - if ([self.accessToken isEqualToString:@"TWILIO_ACCESS_TOKEN"]) { - [self fetchTokenAndConnect]; - } else { - [self doConnect]; - } -} - -- (IBAction)disconnectButtonPressed:(id)sender { - [self.room disconnect]; -} - -- (IBAction)micButtonPressed:(id)sender { - // We will toggle the mic to mute/unmute and change the title according to the user action. - - if (self.localAudioTrack) { - self.localAudioTrack.enabled = !self.localAudioTrack.isEnabled; - - // Toggle the button title - if (self.localAudioTrack.isEnabled) { - [self.micButton setTitle:@"Mute" forState:UIControlStateNormal]; - } else { - [self.micButton setTitle:@"Unmute" forState:UIControlStateNormal]; - } - } -} - -#pragma mark - Private - -- (void)startPreview { - // TVICameraSource is not supported with the Simulator. - if ([PlatformUtils isSimulator]) { - [self.previewView removeFromSuperview]; - return; - } - - AVCaptureDevice *frontCamera = [TVICameraSource captureDeviceForPosition:AVCaptureDevicePositionFront]; - AVCaptureDevice *backCamera = [TVICameraSource captureDeviceForPosition:AVCaptureDevicePositionBack]; - - if (frontCamera != nil || backCamera != nil) { - self.camera = [[TVICameraSource alloc] initWithDelegate:self]; - self.localVideoTrack = [TVILocalVideoTrack trackWithSource:self.camera - enabled:YES - name:@"Camera"]; - // Add renderer to video track for local preview - [self.localVideoTrack addRenderer:self.previewView]; - [self logMessage:@"Video track created"]; - - if (frontCamera != nil && backCamera != nil) { - UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self - action:@selector(flipCamera)]; - [self.previewView addGestureRecognizer:tap]; - } - - [self.camera startCaptureWithDevice:frontCamera != nil ? 
frontCamera : backCamera - completion:^(AVCaptureDevice *device, TVIVideoFormat *format, NSError *error) { - if (error != nil) { - [self logMessage:[NSString stringWithFormat:@"Start capture failed with error.\ncode = %lu error = %@", error.code, error.localizedDescription]]; - } else { - self.previewView.mirror = (device.position == AVCaptureDevicePositionFront); - } - }]; - } else { - [self logMessage:@"No front or back capture device found!"]; - } -} - -- (void)flipCamera { - AVCaptureDevice *newDevice = nil; - - if (self.camera.device.position == AVCaptureDevicePositionFront) { - newDevice = [TVICameraSource captureDeviceForPosition:AVCaptureDevicePositionBack]; - } else { - newDevice = [TVICameraSource captureDeviceForPosition:AVCaptureDevicePositionFront]; - } - - if (newDevice != nil) { - [self.camera selectCaptureDevice:newDevice completion:^(AVCaptureDevice *device, TVIVideoFormat *format, NSError *error) { - if (error != nil) { - [self logMessage:[NSString stringWithFormat:@"Error selecting capture device.\ncode = %lu error = %@", error.code, error.localizedDescription]]; - } else { - self.previewView.mirror = (device.position == AVCaptureDevicePositionFront); - } - }]; - } -} - -- (void)prepareMedia { - // We will share audio and video when we connect to the Room. - - /* - * The important thing to remember when providing a TVIAudioDevice is that the device must be set - * before performing any other actions with the SDK (such as creating Tracks, or connecting to a Room). - * In this case we've already initialized our own `TVIDefaultAudioDevice` instance which we will now set. - */ - TwilioVideo.audioDevice = self.audioDevice; - - // Create an audio track. - if (!self.localAudioTrack) { - self.localAudioTrack = [TVILocalAudioTrack track]; - - if (!self.localAudioTrack) { - [self logMessage:@"Failed to add audio track"]; - } - } - - // Create a video track which captures from the camera. - [self startPreview]; -} - -- (void)startAudioDevice { - self.audioDevice.enabled = YES; -} - -- (void)stopAudioDevice { - self.audioDevice.enabled = NO; -} - -- (void)startVideoPlayer { - if (self.videoPlayer != nil) { - [self logMessage:@"Using an already prepared AVPlayer"]; - [self.videoPlayer play]; - return; - } - - NSURL *contentUrl = [NSURL URLWithString:kVideoMovURL]; - AVPlayer *player = [AVPlayer playerWithURL:contentUrl]; - [player addObserver:self forKeyPath:kStatusKey options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld context:nil]; - [player play]; - - self.videoPlayer = player; - - // Add Video UI on screen. - AVPlayerView *playerView = [[AVPlayerView alloc] initWithPlayer:player]; - [self.view insertSubview:playerView atIndex:0]; - self.videoPlayerView = playerView; - - // We will rely on frame based layout to size and position `self.videoPlayerView`. - [self.view setNeedsLayout]; -} - -- (void)stopVideoPlayer { - [self.videoPlayer pause]; - [self.videoPlayer removeObserver:self forKeyPath:kStatusKey]; - self.videoPlayer = nil; - - // Remove Video UI from screen. 
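The comment block in prepareMedia above states the one ordering rule that both the removed Objective-C code and the new Swift code rely on: the custom TVIAudioDevice must be installed on TwilioVideo before any Tracks are created or a Room is connected, and it must not be swapped while a Room created with it is still alive. A minimal sketch of that ordering, using only API names that already appear in this diff; the TVI* and ExampleAVPlayerAudioDevice types are assumed to be visible to Swift the same way they are in ViewController.swift.

func connectWithCustomAudioDevice(accessToken: String, delegate: TVIRoomDelegate) -> TVIRoom {
    // 1. Install the audio device before any other SDK calls.
    let audioDevice = ExampleAVPlayerAudioDevice()
    TwilioVideo.audioDevice = audioDevice

    // 2. Only then create local media.
    let audioTrack = TVILocalAudioTrack()

    // 3. Connect last; every Room connected after this point shares the same device.
    let options = TVIConnectOptions(token: accessToken) { builder in
        builder.audioTracks = audioTrack != nil ? [audioTrack!] : []
    }
    return TwilioVideo.connect(with: options, delegate: delegate)
}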
- [self.videoPlayerView removeFromSuperview]; - self.videoPlayerView = nil; -} - -- (void)fetchTokenAndConnect { - [self logMessage:[NSString stringWithFormat:@"Fetching an access token"]]; - - [TokenUtils retrieveAccessTokenFromURL:self.tokenUrl completion:^(NSString *token, NSError *err) { - dispatch_async(dispatch_get_main_queue(), ^{ - if (!err) { - self.accessToken = token; - [self doConnect]; - } else { - [self logMessage:[NSString stringWithFormat:@"Error retrieving the access token"]]; - [self showInterfaceState:ViewControllerStateLobby]; - } - }); - }]; -} - -- (void)doConnect { - if ([self.accessToken isEqualToString:@"TWILIO_ACCESS_TOKEN"]) { - [self logMessage:@"Please provide a valid token to connect to a room"]; - return; - } - - // Since we are configuring audio session explicitly, we will call startAudioDevice every time we attempt to connect. - [self startAudioDevice]; - - TVIConnectOptions *connectOptions = [TVIConnectOptions optionsWithToken:self.accessToken - block:^(TVIConnectOptionsBuilder * _Nonnull builder) { - - // Use the local media that we prepared earlier. - builder.audioTracks = self.localAudioTrack ? @[ self.localAudioTrack ] : @[ ]; - builder.videoTracks = self.localVideoTrack ? @[ self.localVideoTrack ] : @[ ]; - - // The name of the Room where the Client will attempt to connect to. Please note that if you pass an empty - // Room `name`, the Client will create one for you. You can get the name or sid from any connected Room. - builder.roomName = self.roomTextField.text; - }]; - - // Connect to the Room using the options we provided. - self.room = [TwilioVideo connectWithOptions:connectOptions delegate:self]; - - [self logMessage:[NSString stringWithFormat:@"Attempting to connect to room %@", self.roomTextField.text]]; -} - -- (void)setupRemoteView { - // Creating a `TVIVideoView` programmatically. - TVIVideoView *remoteView = [[TVIVideoView alloc] init]; - - // `TVIVideoView` supports UIViewContentModeScaleToFill, UIViewContentModeScaleAspectFill and UIViewContentModeScaleAspectFit - // UIViewContentModeScaleAspectFit is the default mode when you create `TVIVideoView` programmatically. - self.remoteView.contentMode = UIViewContentModeScaleAspectFit; - - [self.view insertSubview:remoteView atIndex:0]; - self.remoteView = remoteView; - - // We will rely on frame based layout to size and position `self.remoteView`. 
- [self.view setNeedsLayout]; -} - -// Reset the client ui status -- (void)showInterfaceState:(ViewControllerState)state { - self.roomTextField.hidden = state != ViewControllerStateLobby; - self.connectButton.hidden = state != ViewControllerStateLobby; - self.roomLine.hidden = state != ViewControllerStateLobby; - self.roomLabel.hidden = state != ViewControllerStateLobby; - self.micButton.hidden = state != ViewControllerStateRoom; - self.messageLabel.hidden = state == ViewControllerStateMediaPlayer; - self.disconnectButton.hidden = state == ViewControllerStateLobby; - [UIApplication sharedApplication].idleTimerDisabled = state != ViewControllerStateLobby; -} - -- (void)dismissKeyboard { - if (self.roomTextField.isFirstResponder) { - [self.roomTextField resignFirstResponder]; - } -} - -- (void)cleanupRemoteParticipant { - if (self.remoteParticipant) { - if ([self.remoteParticipant.videoTracks count] > 0) { - TVIRemoteVideoTrack *videoTrack = self.remoteParticipant.remoteVideoTracks[0].remoteTrack; - [videoTrack removeRenderer:self.remoteView]; - [self.remoteView removeFromSuperview]; - } - self.remoteParticipant = nil; - } -} - -- (void)logMessage:(NSString *)msg { - NSLog(@"%@", msg); - self.messageLabel.text = msg; -} - -#pragma mark - TVIRoomDelegate - -- (void)didConnectToRoom:(TVIRoom *)room { - // At the moment, this example only supports rendering one Participant at a time. - - [self logMessage:[NSString stringWithFormat:@"Connected to room %@ as %@", room.name, room.localParticipant.identity]]; - - if (room.remoteParticipants.count > 0) { - self.remoteParticipant = room.remoteParticipants[0]; - self.remoteParticipant.delegate = self; - [self showInterfaceState:ViewControllerStateRoom]; - } else { - // If there are no Participants, we will play the pre-roll content instead. 
- [self startVideoPlayer]; - [self showInterfaceState:ViewControllerStateMediaPlayer]; - } -} - -- (void)room:(TVIRoom *)room didDisconnectWithError:(nullable NSError *)error { - [self logMessage:[NSString stringWithFormat:@"Disconncted from room %@, error = %@", room.name, error]]; - - if (!self.videoPlayer) { - [self stopAudioDevice]; - } else { - [self stopVideoPlayer]; - } - - [self cleanupRemoteParticipant]; - self.room = nil; - [self showInterfaceState:ViewControllerStateLobby]; -} - -- (void)room:(TVIRoom *)room didFailToConnectWithError:(nonnull NSError *)error{ - [self logMessage:[NSString stringWithFormat:@"Failed to connect to room, error = %@", error]]; - - self.room = nil; - - [self showInterfaceState:ViewControllerStateLobby]; -} - -- (void)room:(TVIRoom *)room isReconnectingWithError:(NSError *)error { - NSString *message = [NSString stringWithFormat:@"Reconnecting due to %@", error.localizedDescription]; - [self logMessage:message]; -} - -- (void)didReconnectToRoom:(TVIRoom *)room { - [self logMessage:@"Reconnected to room"]; -} - -- (void)room:(TVIRoom *)room participantDidConnect:(TVIRemoteParticipant *)participant { - if (!self.remoteParticipant) { - self.remoteParticipant = participant; - self.remoteParticipant.delegate = self; - } - - if ([room.remoteParticipants count] == 1) { - [self stopVideoPlayer]; - [self showInterfaceState:ViewControllerStateRoom]; - } - - [self logMessage:[NSString stringWithFormat:@"Participant %@ connected with %lu audio and %lu video tracks", - participant.identity, - (unsigned long)[participant.audioTracks count], - (unsigned long)[participant.videoTracks count]]]; -} - -- (void)room:(TVIRoom *)room participantDidDisconnect:(TVIRemoteParticipant *)participant { - if (self.remoteParticipant == participant) { - [self cleanupRemoteParticipant]; - } - - if ([room.remoteParticipants count] == 0) { - [self startVideoPlayer]; - [self showInterfaceState:ViewControllerStateMediaPlayer]; - } - - [self logMessage:[NSString stringWithFormat:@"Room %@ participant %@ disconnected", room.name, participant.identity]]; -} - -#pragma mark - TVIRemoteParticipantDelegate - -- (void)remoteParticipant:(TVIRemoteParticipant *)participant - publishedVideoTrack:(TVIRemoteVideoTrackPublication *)publication { - - // Remote Participant has offered to share the video Track. - - [self logMessage:[NSString stringWithFormat:@"Participant %@ published video track.", participant.identity]]; -} - -- (void)remoteParticipant:(TVIRemoteParticipant *)participant - unpublishedVideoTrack:(TVIRemoteVideoTrackPublication *)publication { - - // Remote Participant has stopped sharing the video Track. - - [self logMessage:[NSString stringWithFormat:@"Participant %@ unpublished video track.", participant.identity]]; -} - -- (void)remoteParticipant:(TVIRemoteParticipant *)participant - publishedAudioTrack:(TVIRemoteAudioTrackPublication *)publication { - - // Remote Participant has offered to share the audio Track. - - [self logMessage:[NSString stringWithFormat:@"Participant %@ published audio track.", participant.identity]]; -} - -- (void)remoteParticipant:(TVIRemoteParticipant *)participant - unpublishedAudioTrack:(TVIRemoteAudioTrackPublication *)publication { - - // Remote Participant has stopped sharing the audio Track. 
- - [self logMessage:[NSString stringWithFormat:@"Participant %@ unpublished audio track.", participant.identity]]; -} - -- (void)subscribedToVideoTrack:(TVIRemoteVideoTrack *)videoTrack - publication:(TVIRemoteVideoTrackPublication *)publication - forParticipant:(TVIRemoteParticipant *)participant { - - // We are subscribed to the remote Participant's audio Track. We will start receiving the - // remote Participant's video frames now. - - [self logMessage:[NSString stringWithFormat:@"Subscribed to video track for Participant %@", participant.identity]]; - - if (self.remoteParticipant == participant) { - [self setupRemoteView]; - [videoTrack addRenderer:self.remoteView]; - } -} - -- (void)unsubscribedFromVideoTrack:(TVIRemoteVideoTrack *)videoTrack - publication:(TVIRemoteVideoTrackPublication *)publication - forParticipant:(TVIRemoteParticipant *)participant { - - // We are unsubscribed from the remote Participant's video Track. We will no longer receive the - // remote Participant's video. - - [self logMessage:[NSString stringWithFormat:@"Unsubscribed from video track for Participant %@", participant.identity]]; - - if (self.remoteParticipant == participant) { - [videoTrack removeRenderer:self.remoteView]; - [self.remoteView removeFromSuperview]; - } -} - -- (void)subscribedToAudioTrack:(TVIRemoteAudioTrack *)audioTrack - publication:(TVIRemoteAudioTrackPublication *)publication - forParticipant:(TVIRemoteParticipant *)participant { - - // We are subscribed to the remote Participant's audio Track. We will start receiving the - // remote Participant's audio now. - - [self logMessage:[NSString stringWithFormat:@"Subscribed to audio track for Participant %@", participant.identity]]; -} - -- (void)unsubscribedFromAudioTrack:(TVIRemoteAudioTrack *)audioTrack - publication:(TVIRemoteAudioTrackPublication *)publication - forParticipant:(TVIRemoteParticipant *)participant { - - // We are unsubscribed from the remote Participant's audio Track. We will no longer receive the - // remote Participant's audio. 
- - [self logMessage:[NSString stringWithFormat:@"Unsubscribed from audio track for Participant %@", participant.identity]]; -} - -- (void)remoteParticipant:(TVIRemoteParticipant *)participant - enabledVideoTrack:(TVIRemoteVideoTrackPublication *)publication { - [self logMessage:[NSString stringWithFormat:@"Participant %@ enabled video track.", participant.identity]]; -} - -- (void)remoteParticipant:(TVIRemoteParticipant *)participant - disabledVideoTrack:(TVIRemoteVideoTrackPublication *)publication { - [self logMessage:[NSString stringWithFormat:@"Participant %@ disabled video track.", participant.identity]]; -} - -- (void)remoteParticipant:(TVIRemoteParticipant *)participant - enabledAudioTrack:(TVIRemoteAudioTrackPublication *)publication { - [self logMessage:[NSString stringWithFormat:@"Participant %@ enabled audio track.", participant.identity]]; -} - -- (void)remoteParticipant:(TVIRemoteParticipant *)participant - disabledAudioTrack:(TVIRemoteAudioTrackPublication *)publication { - [self logMessage:[NSString stringWithFormat:@"Participant %@ disabled audio track.", participant.identity]]; -} - -- (void)failedToSubscribeToAudioTrack:(TVIRemoteAudioTrackPublication *)publication - error:(NSError *)error - forParticipant:(TVIRemoteParticipant *)participant { - [self logMessage:[NSString stringWithFormat:@"Participant %@ failed to subscribe to %@ audio track.", - participant.identity, publication.trackName]]; -} - -- (void)failedToSubscribeToVideoTrack:(TVIRemoteVideoTrackPublication *)publication - error:(NSError *)error - forParticipant:(TVIRemoteParticipant *)participant { - [self logMessage:[NSString stringWithFormat:@"Participant %@ failed to subscribe to %@ video track.", - participant.identity, publication.trackName]]; -} - -#pragma mark - TVIVideoViewDelegate - -- (void)videoView:(TVIVideoView *)view videoDimensionsDidChange:(CMVideoDimensions)dimensions { - NSLog(@"Dimensions changed to: %d x %d", dimensions.width, dimensions.height); - [self.view setNeedsLayout]; -} - -#pragma mark - TVICameraSourceDelegate -- (void)cameraSource:(TVICameraSource *)source didFailWithError:(NSError *)error { - [self logMessage:[NSString stringWithFormat:@"Capture failed with error.\ncode = %lu error = %@", error.code, error.localizedDescription]]; -} - -- (void)cameraSourceWasInterrupted:(TVICameraSource *)source reason:(AVCaptureSessionInterruptionReason)reason { - // We will disable `self.localVideoTrack` when the TVICameraCapturer is interrupted. - // This prevents other Participants from seeing a frozen frame while the Client is backgrounded. - self.localVideoTrack.enabled = NO; -} - -- (void)cameraSourceInterruptionEnded:(TVICameraSource *)source { - self.localVideoTrack.enabled = YES; -} - -@end diff --git a/AVPlayerExample/ViewController.swift b/AVPlayerExample/ViewController.swift new file mode 100755 index 00000000..ad6c73ea --- /dev/null +++ b/AVPlayerExample/ViewController.swift @@ -0,0 +1,798 @@ +// +// ViewController.swift +// AVPlayerExample +// +// Copyright © 2018 Twilio Inc. All rights reserved. +// + +import AVFoundation +import UIKit + +class ViewController: UIViewController { + + // MARK: View Controller Members + + // Configure access token manually for testing, if desired! Create one manually in the console + // at https://www.twilio.com/console/video/runtime/testing-tools + var accessToken = "TWILIO_ACCESS_TOKEN" + + // Configure remote URL to fetch token from + var tokenUrl = "http://localhost:8000/token.php" + + // Twilio Video classes. + var room: TVIRoom? 
+ var camera: TVICameraSource? + var localVideoTrack: TVILocalVideoTrack! + var playerVideoTrack: TVILocalVideoTrack? + var localAudioTrack: TVILocalAudioTrack! + + // How long we will spend in pre-roll, attempting to synchronize our AVPlayer and AudioUnit graph. + static let kPrerollDuration = Double(1.0) + static let kPlayerTrackName = "player-track" + + // AVPlayer Audio/Video. + var audioDevice: ExampleAVPlayerAudioDevice? + var videoPlayer: AVPlayer? = nil + var videoPlayerAudioTap: ExampleAVPlayerAudioTap? = nil + var videoPlayerSource: ExampleAVPlayerSource? = nil + var videoPlayerView: ExampleAVPlayerView? = nil + var videoPlayerUrl: URL? = nil + var videoPlayerPreroll: Bool = false + + @IBOutlet weak var localHeightConstraint: NSLayoutConstraint? + @IBOutlet weak var localWidthConstraint: NSLayoutConstraint? + @IBOutlet weak var remoteHeightConstraint: NSLayoutConstraint? + @IBOutlet weak var remoteWidthConstraint: NSLayoutConstraint? + + @IBOutlet weak var hangupButton: UIButton! + @IBOutlet weak var connectButton: UIButton! + @IBOutlet weak var roomTextField: UITextField! + @IBOutlet weak var greyLine: UIView! + @IBOutlet weak var roomLabel: UILabel! + + @IBOutlet weak var audioMixingSwitch: UISwitch! + + @IBOutlet weak var localView: TVIVideoView! + weak var remotePlayerView: TVIVideoView? + @IBOutlet weak var remoteView: TVIVideoView! + + static let kRemoteContentUrls = [ + // Nice stereo separation in the trailer music. We now record and playback in stereo. + "American Animals Trailer 2 (720p24, 44.1 kHz)" : URL(string: "http://movietrailers.apple.com/movies/independent/american-animals/american-animals-trailer-2_h720p.mov")!, + "Avengers: Infinity War Trailer 3 (720p24, 44.1 kHz)" : URL(string: "https://trailers.apple.com/movies/marvel/avengers-infinity-war/avengers-infinity-war-trailer-2_h720p.mov")!, + // HLS stream which runs into the AVPlayer / AVAudioMix issue. + "BitDash - Parkour (HLS)" : URL(string: "https://bitdash-a.akamaihd.net/content/MI201109210084_1/m3u8s/f08e80da-bf1d-4e3d-8899-f0f6155f6efa.m3u8")!, + // 540p variant taken directly from the master playlist above. Still shows the AVPlayer issue. + "BitDash - Parkour (HLS, 540p)" : URL(string: "https://bitdash-a.akamaihd.net/content/MI201109210084_1/m3u8s/f08e80da-bf1d-4e3d-8899-f0f6155f6efa_video_540_1200000.m3u8")!, + // Progressive download mp4 version. Demonstrates that 48 kHz support is incorrect right now. + "BitDash - Parkour (1080p25, 48 kHz)" : URL(string: "https://bitmovin-a.akamaihd.net/content/MI201109210084_1/MI201109210084_mpeg-4_hd_high_1080p25_10mbits.mp4")!, + // Encoding in 1080p takes significantly more CPU than 720p + "Interstellar Trailer 3 (720p24, 44.1 kHz)" : URL(string: "http://movietrailers.apple.com/movies/paramount/interstellar/interstellar-tlr4_h720p.mov")!, + "Interstellar Trailer 3 (1080p24, 44.1 kHz)" : URL(string: "http://movietrailers.apple.com/movies/paramount/interstellar/interstellar-tlr4_h1080p.mov")!, + // Most trailers have a lot of cuts... this one not as many + "Mississippi Grind (720p24, 44.1 kHz)" : URL(string: "http://movietrailers.apple.com/movies/independent/mississippigrind/mississippigrind-tlr1_h1080p.mov")!, + // HLS stream which runs into the AVPlayer / AVAudioMix issue. + "Tele Quebec (HLS)" : URL(string: "https://mnmedias.api.telequebec.tv/m3u8/29880.m3u8")!, + // Video only source, but at 30 fps which is the max frame rate that we can capture. 
+ "Telecom ParisTech, GPAC (720p30)" : URL(string: "https://download.tsi.telecom-paristech.fr/gpac/dataset/dash/uhd/mux_sources/hevcds_720p30_2M.mp4")!, + "Telecom ParisTech, GPAC (1080p30)" : URL(string: "https://download.tsi.telecom-paristech.fr/gpac/dataset/dash/uhd/mux_sources/hevcds_1080p30_6M.mp4")!, + "Twilio: What is Cloud Communications? (1080p24, 44.1 kHz)" : URL(string: "https://s3-us-west-1.amazonaws.com/avplayervideo/What+Is+Cloud+Communications.mov")! + ] + static let kRemoteContentURL = kRemoteContentUrls["Mississippi Grind (720p24, 44.1 kHz)"]! + + override func viewDidLoad() { + super.viewDidLoad() + + let red = UIColor(red: 226.0/255.0, + green: 29.0/255.0, + blue: 37.0/255.0, + alpha: 1.0) + + connectButton.backgroundColor = red + self.hangupButton.backgroundColor = red + self.hangupButton.titleLabel?.textColor = UIColor.white + self.hangupButton.isHidden = true + self.audioMixingSwitch.isHidden = true + + connectButton.layer.cornerRadius = 4; + hangupButton.layer.cornerRadius = 2; + + self.localView.contentMode = UIView.ContentMode.scaleAspectFit + self.localView.delegate = self + self.localWidthConstraint = self.localView.constraints.first + self.localHeightConstraint = self.localView.constraints.last + self.remoteView.contentMode = UIView.ContentMode.scaleAspectFit + self.remoteView.delegate = self + self.remoteHeightConstraint = self.remoteView.constraints.first + self.remoteWidthConstraint = self.remoteView.constraints.last + + if let videoUrl = videoPlayerUrl { + connect(contentUrl: videoUrl) + } + let tap = UITapGestureRecognizer(target: self, action: #selector(ViewController.dismissKeyboard)) + self.view.addGestureRecognizer(tap) + + self.dismissKeyboard() + } + + override func viewWillAppear(_ animated: Bool) { + super.viewWillAppear(animated) + } + + override func viewWillLayoutSubviews() { + super.viewWillLayoutSubviews() + + if let playerView = videoPlayerView { + playerView.frame = CGRect(origin: CGPoint.zero, size: self.view.bounds.size) + } + if let remotePlayerView = remotePlayerView { + remotePlayerView.frame = CGRect(origin: CGPoint.zero, size: self.view.bounds.size) + } + } + + override func updateViewConstraints() { + super.updateViewConstraints() + + if self.localView.hasVideoData { + let localDimensions = self.localView.videoDimensions + if localDimensions.width > localDimensions.height { + self.localWidthConstraint?.constant = 128 + self.localHeightConstraint?.constant = 96 + } else { + self.localWidthConstraint?.constant = 96 + self.localHeightConstraint?.constant = 128 + } + } + + if self.remoteView.hasVideoData { + let remoteDimensions = self.remoteView.videoDimensions + if remoteDimensions.width > remoteDimensions.height { + self.remoteWidthConstraint?.constant = 128 + self.remoteHeightConstraint?.constant = 96 + } else { + self.remoteWidthConstraint?.constant = 96 + self.remoteHeightConstraint?.constant = 128 + } + } + } + + @objc func dismissKeyboard() { + if (self.roomTextField.isFirstResponder) { + self.roomTextField.resignFirstResponder() + } + } + + override var prefersHomeIndicatorAutoHidden: Bool { + get { + return self.room != nil + } + } + + override var prefersStatusBarHidden: Bool { + get { + return self.room != nil + } + } + + @IBAction func connect(_ sender: Any) { + dismissKeyboard() + connect(contentUrl: ViewController.kRemoteContentURL) + } + + @IBAction func toggleAudioMixing(_ sender: Any) { + // TODO: Pause/stop audio mixing on demand + } + + public func connect(contentUrl: URL) { + videoPlayerUrl = contentUrl + if 
self.isViewLoaded == false { + return + } + + if self.audioDevice == nil { + let device = ExampleAVPlayerAudioDevice() + TwilioVideo.audioDevice = device + self.audioDevice = device + } + connect(name: "presenter") + } + + @IBAction func hangup(_ sender: Any) { + self.room?.disconnect() + } + + func logMessage(messageText: String) { + print(messageText) + } + + func connect(name: String) { + // Configure access token either from server or manually. + // If the default wasn't changed, try fetching from server. + if (accessToken == "TWILIO_ACCESS_TOKEN") { + let urlStringWithRole = tokenUrl + "?identity=" + name + do { + accessToken = try String(contentsOf:URL(string: urlStringWithRole)!) + } catch { + let message = "Failed to fetch access token" + print(message) + return + } + } + + // Prepare local media which we will share with Room Participants. + self.prepareLocalMedia() + // Preparing the connect options with the access token that we fetched (or hardcoded). + let connectOptions = TVIConnectOptions.init(token: accessToken) { (builder) in + + // Use the local media that we prepared earlier. + builder.videoTracks = self.localVideoTrack != nil ? [self.localVideoTrack!] : [] + builder.audioTracks = self.localAudioTrack != nil ? [self.localAudioTrack!] : [TVILocalAudioTrack]() + + // The name of the Room where the Client will attempt to connect to. Please note that if you pass an empty + // Room `name`, the Client will create one for you. You can get the name or sid from any connected Room. + builder.roomName = "twilio" + + // Using more bandwidth for presenter audio. + builder.encodingParameters = TVIEncodingParameters(audioBitrate: 1024 * 96, videoBitrate: 0) + } + + // Connect to the Room using the options we provided. + room = TwilioVideo.connect(with: connectOptions, delegate: self) + print("Attempting to connect to:", connectOptions.roomName as Any) + + self.showRoomUI(inRoom: true) + } + + func prepareLocalMedia() { + // All Participants share local audio and video when they connect to the Room. + // Create an audio track. + if (localAudioTrack == nil) { + localAudioTrack = TVILocalAudioTrack.init() + + if (localAudioTrack == nil) { + print("Failed to create audio track") + } + } + + // Create a camera video Track. + #if !targetEnvironment(simulator) + let frontCamera = TVICameraSource.captureDevice(for: .front) + let backCamera = TVICameraSource.captureDevice(for: .back) + + if (frontCamera != nil || backCamera != nil) { + // Preview our local camera track in the local video preview view. + camera = TVICameraSource(delegate: self) + localVideoTrack = TVILocalVideoTrack.init(source: camera!, enabled: true, name: "Camera") + + // Add renderer to video track for local preview + localVideoTrack!.addRenderer(self.localView) + logMessage(messageText: "Video track created") + + if (frontCamera != nil && backCamera != nil) { + // We will flip camera on tap. + let tap = UITapGestureRecognizer(target: self, action: #selector(ViewController.flipCamera)) + self.localView.addGestureRecognizer(tap) + } + + camera!.startCapture(with: frontCamera != nil ? frontCamera! : backCamera!) 
{ (captureDevice, videoFormat, error) in + if let error = error { + self.logMessage(messageText: "Capture failed with error.\ncode = \((error as NSError).code) error = \(error.localizedDescription)") + } else { + self.localView.shouldMirror = (captureDevice.position == .front) + } + } + } + else { + self.logMessage(messageText:"No front or back capture device found!") + } + #else + localAudioTrack.isEnabled = false + #endif + } + + @objc func flipCamera() { + var newDevice: AVCaptureDevice? + + if let camera = self.camera, let captureDevice = camera.device { + if captureDevice.position == .front { + newDevice = TVICameraSource.captureDevice(for: .back) + } else { + newDevice = TVICameraSource.captureDevice(for: .front) + } + + if let newDevice = newDevice { + camera.select(newDevice) { (captureDevice, videoFormat, error) in + if let error = error { + self.logMessage(messageText: "Error selecting capture device.\ncode = \((error as NSError).code) error = \(error.localizedDescription)") + } else { + self.localView.shouldMirror = (captureDevice.position == .front) + } + } + } + } + } + + func showRoomUI(inRoom: Bool) { + self.hangupButton.isHidden = !inRoom + // TODO: show audioMixingSwitch when connected to a Room. + self.localView.isHidden = !inRoom + self.remoteView.isHidden = !inRoom + self.connectButton.isHidden = inRoom + self.greyLine.isHidden = inRoom + self.roomTextField.isHidden = inRoom + self.roomLabel.isHidden = inRoom + self.setNeedsUpdateOfHomeIndicatorAutoHidden() + self.setNeedsStatusBarAppearanceUpdate() + UIApplication.shared.isIdleTimerDisabled = inRoom + + if inRoom == false { + UIView.animate(withDuration: 0.2) { + self.view.backgroundColor = .white + } + } + } + + func startVideoPlayer() { + if let player = self.videoPlayer { + player.play() + return + } + + let asset = AVAsset(url: videoPlayerUrl!) + let assetKeysToPreload = [ + "hasProtectedContent", + "playable", + "tracks" + ] + print("Created asset with tracks:", asset.tracks as Any) + + let playerItem = AVPlayerItem(asset: asset, automaticallyLoadedAssetKeys: assetKeysToPreload) + // Prevent excessive resource usage when the content is HLS. We will downscale large progressively streamed content. + playerItem.preferredMaximumResolution = ExampleAVPlayerSource.kFrameOutputMaxRect.size + // Register as an observer of the player item's status property + playerItem.addObserver(self, + forKeyPath: #keyPath(AVPlayerItem.status), + options: [.old, .new], + context: nil) + + playerItem.addObserver(self, + forKeyPath: #keyPath(AVPlayerItem.tracks), + options: [.old, .new], + context: nil) + + let player = AVPlayer(playerItem: playerItem) + player.volume = Float(0) + player.automaticallyWaitsToMinimizeStalling = false + + var audioClock: CMClock? = nil + let status = CMAudioClockCreate(allocator: nil, clockOut: &audioClock) + if (status == noErr) { + player.masterClock = audioClock; + } + videoPlayer = player + + let playerView = ExampleAVPlayerView(frame: CGRect.zero, player: player) + videoPlayerView = playerView + + let tapRecognizer = UITapGestureRecognizer(target: self, action: #selector(handlePlayerTap)) + tapRecognizer.numberOfTapsRequired = 2 + videoPlayerView?.addGestureRecognizer(tapRecognizer) + + // We will rely on frame based layout to size and position `self.videoPlayerView`. 
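startVideoPlayer() above wraps the AVPlayer in an ExampleAVPlayerView created with (frame:player:); that class lives in ExampleAVPlayerView.swift, which is not part of this hunk. A player-hosting view of this kind is typically a UIView backed by an AVPlayerLayer. The sketch below is hypothetical (the PlayerBackedView name and the resizeAspect gravity are illustrative), not the project's implementation.

import AVFoundation
import UIKit

// Hypothetical sketch of a player-backed view in the style of ExampleAVPlayerView.
class PlayerBackedView: UIView {
    // Back the view with an AVPlayerLayer instead of a plain CALayer.
    override class var layerClass: AnyClass {
        return AVPlayerLayer.self
    }

    var playerLayer: AVPlayerLayer {
        return layer as! AVPlayerLayer
    }

    init(frame: CGRect, player: AVPlayer) {
        super.init(frame: frame)
        playerLayer.player = player
        playerLayer.videoGravity = .resizeAspect
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }
}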
+ self.view.insertSubview(playerView, at: 0) + self.view.setNeedsLayout() + UIView.animate(withDuration: 0.2) { + self.view.backgroundColor = UIColor.black + } + } + + @objc func handlePlayerTap(recognizer: UITapGestureRecognizer) { + if let view = self.videoPlayerView { + view.contentMode = view.contentMode == .scaleAspectFit ? .scaleAspectFill : .scaleAspectFit + } + } + + func setupRemoteVideoPlayer(videoTrack: TVIRemoteVideoTrack) { + guard let view = TVIVideoView(frame: self.view.bounds, delegate: nil) else { + return + } + view.contentMode = UIView.ContentMode.scaleAspectFit + videoTrack.addRenderer(view) + self.remotePlayerView = view + self.view.insertSubview(view, at: 0) + self.view.setNeedsLayout() + UIView.animate(withDuration: 0.2) { + self.view.backgroundColor = UIColor.black + } + } + + func setupVideoSource(item: AVPlayerItem) { + videoPlayerSource = ExampleAVPlayerSource(item: item) + + // Create and publish video track. + if let track = TVILocalVideoTrack(capturer: videoPlayerSource!, + enabled: true, + constraints: nil, + name: ViewController.kPlayerTrackName) { + playerVideoTrack = track + self.room!.localParticipant!.publishVideoTrack(track) + } + } + + func setupAudioMix(player: AVPlayer, playerItem: AVPlayerItem) { + guard let audioAssetTrack = firstAudioAssetTrack(playerItem: playerItem) else { + return + } + print("Setup audio mix with AudioAssetTrack, Id:", audioAssetTrack.trackID as Any, "\n", + "Asset:", audioAssetTrack.asset as Any, "\n", + "Audio Fallbacks:", audioAssetTrack.associatedTracks(ofType: AVAssetTrack.AssociationType.audioFallback), "\n", + "isPlayable:", audioAssetTrack.isPlayable) + + let audioMix = AVMutableAudioMix() + + let inputParameters = AVMutableAudioMixInputParameters(track: audioAssetTrack) + // TODO: Is memory management of the MTAudioProcessingTap correct? + inputParameters.audioTapProcessor = audioDevice!.createProcessingTap()?.takeUnretainedValue() + audioMix.inputParameters = [inputParameters] + playerItem.audioMix = audioMix + } + + func firstAudioAssetTrack(playerItem: AVPlayerItem) -> AVAssetTrack? { + var audioAssetTracks: [AVAssetTrack] = [] + for playerItemTrack in playerItem.tracks { + if let assetTrack = playerItemTrack.assetTrack, + assetTrack.mediaType == AVMediaType.audio { + audioAssetTracks.append(assetTrack) + } + } + return audioAssetTracks.first + } + + func updateAudioMixParameters(playerItem: AVPlayerItem) { + // Update the audio mix to point to the first AVAssetTrack that we find. + if let audioAssetTrack = firstAudioAssetTrack(playerItem: playerItem), + let inputParameters = playerItem.audioMix?.inputParameters.first { + let mutableInputParameters = inputParameters as! AVMutableAudioMixInputParameters + mutableInputParameters.trackID = audioAssetTrack.trackID + print("Update the mix input parameters to use Track Id:", audioAssetTrack.trackID as Any, "\n", + "Asset:", audioAssetTrack.asset as Any, "\n", + "Audio Fallbacks:", audioAssetTrack.associatedTracks(ofType: AVAssetTrack.AssociationType.audioFallback), "\n", + "isPlayable:", audioAssetTrack.isPlayable) + } else { + // TODO + } + } + + func stopVideoPlayer() { + print(#function) + + videoPlayer?.pause() + videoPlayer?.currentItem?.removeObserver(self, forKeyPath: #keyPath(AVPlayerItem.status)) + videoPlayer?.currentItem?.removeObserver(self, forKeyPath: #keyPath(AVPlayerItem.tracks)) + videoPlayer?.currentItem?.remove((videoPlayerSource?.videoOutput)!) 
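setupAudioMix(player:playerItem:) above hands the AVPlayerItem a tap obtained from ExampleAVPlayerAudioDevice.createProcessingTap(); the tap implementation itself (ExampleAVPlayerAudioTap and its Objective-C counterpart) lives in other files of this change. For orientation only, a minimal pass-through MTAudioProcessingTap in Swift might look roughly like this. It is a hypothetical sketch, and a real tap would also copy the fetched samples into a ring buffer for the audio device to consume.

import MediaToolbox

// Hypothetical helper, not the project's tap.
func makePassthroughTap() -> MTAudioProcessingTap? {
    var callbacks = MTAudioProcessingTapCallbacks(
        version: kMTAudioProcessingTapCallbacksVersion_0,
        clientInfo: nil,
        init: nil,
        finalize: nil,
        prepare: nil,
        unprepare: nil,
        process: { tap, numberFrames, _, bufferListInOut, numberFramesOut, flagsOut in
            // Pull the decoded audio that the AVPlayerItem is about to render.
            if MTAudioProcessingTapGetSourceAudio(tap, numberFrames, bufferListInOut,
                                                  flagsOut, nil, numberFramesOut) != noErr {
                // On failure, report zero frames so downstream hears silence for this cycle.
                numberFramesOut.pointee = 0
            }
        })

    var tap: Unmanaged<MTAudioProcessingTap>?
    let status = MTAudioProcessingTapCreate(kCFAllocatorDefault, &callbacks,
                                            kMTAudioProcessingTapCreationFlag_PostEffects, &tap)
    return status == noErr ? tap?.takeRetainedValue() : nil
}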
+ videoPlayer?.currentItem?.audioMix = nil + videoPlayer?.replaceCurrentItem(with: nil) + videoPlayer = nil + + // TODO: Unpublish player video. + + // Remove player UI + videoPlayerView?.removeFromSuperview() + videoPlayerView = nil + } + + func prerollVideoPlayer() { + print("Preparing to play asset with Tracks:", videoPlayer?.currentItem?.asset.tracks as Any) + + videoPlayerPreroll = true + videoPlayer?.preroll(atRate: 1.0, completionHandler: { (success) in + if (success) { + // Start audio and video playback at a time synchronized with both parties. + // let now = CMClockGetTime(CMClockGetHostTimeClock()) + let now = CMClockGetTime((self.videoPlayer?.masterClock)!) + let start = now + CMTime(seconds: ViewController.kPrerollDuration, preferredTimescale: now.timescale) + + let audioAssetTrack = self.firstAudioAssetTrack(playerItem: (self.videoPlayer?.currentItem)!) + var range = CMTimeRange.invalid + if let assetTrack = audioAssetTrack { + range = assetTrack.timeRange + } + + print("Pre-roll success for item:", self.videoPlayer?.currentItem as Any, "\n", + "Current time:", self.videoPlayer?.currentItem?.currentTime() as Any, "\n", + "Audio asset range:", range as Any, "\n", + "\nStarting at:", start.seconds) + self.videoPlayer?.setRate(1.0, time: CMTime.invalid, atHostTime: start) + self.audioDevice?.startAudioTap(at: start) + } else { + print("Pre-roll failed, waiting to try again ...") + self.videoPlayerPreroll = false + } + }) + } + + override func observeValue(forKeyPath keyPath: String?, + of object: Any?, + change: [NSKeyValueChangeKey : Any]?, + context: UnsafeMutableRawPointer?) { + + if keyPath == #keyPath(AVPlayerItem.status) { + let status: AVPlayerItem.Status + + // Get the status change from the change dictionary + if let statusNumber = change?[.newKey] as? NSNumber { + status = AVPlayerItem.Status(rawValue: statusNumber.intValue)! + } else { + status = .unknown + } + + // Switch over the status + switch status { + case .readyToPlay: + // Player item is ready to play. + print("Ready to play asset.") + // Defer video source setup until we've loaded the asset so that we can determine downscaling for progressive streaming content. + if self.videoPlayerSource == nil { + setupVideoSource(item: object as! AVPlayerItem) + } + + if videoPlayer?.rate == 0 && + videoPlayerPreroll == false { + self.prerollVideoPlayer() + } + break + case .failed: + // Player item failed. See error. + // TODO: Show in the UI. + print("Playback failed with error:", videoPlayer?.currentItem?.error as Any) + break + case .unknown: + // Player item is not yet ready. + print("Player item status is unknown.") + break + } + } else if keyPath == #keyPath(AVPlayerItem.tracks) { + let playerItem = object as! AVPlayerItem + print("Player item tracks are:", playerItem.tracks as Any) + + // Configure our audio capturer to receive audio samples from the AVPlayerItem. + if playerItem.audioMix == nil, + firstAudioAssetTrack(playerItem: playerItem) != nil { + setupAudioMix(player: videoPlayer!, playerItem: playerItem) + } else { + // TODO: Possibly update the existing mix for HLS? + // This doesn't seem to fix the tap bug, nor does deferring mix creation. 
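The status and tracks observation above uses the string-keyPath observeValue(forKeyPath:of:change:context:) override, paired with the explicit removeObserver calls in stopVideoPlayer(). A possible alternative, not what this diff does, is Foundation's closure-based KVO, which ties removal to the lifetime of the returned NSKeyValueObservation.

import AVFoundation

// Hypothetical alternative to the string-keyPath KVO above. In practice the returned
// observation would be stored in a property and released in stopVideoPlayer().
func observeStatus(of playerItem: AVPlayerItem) -> NSKeyValueObservation {
    return playerItem.observe(\.status, options: [.new]) { item, _ in
        switch item.status {
        case .readyToPlay:
            print("Ready to play asset.")
        case .failed:
            print("Playback failed with error:", item.error as Any)
        default:
            print("Player item status is unknown.")
        }
    }
}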
+// updateAudioMixParameters(playerItem: playerItem) + } + } + } +} + +// MARK: TVIRoomDelegate +extension ViewController : TVIRoomDelegate { + func didConnect(to room: TVIRoom) { + + // Listen to events from existing `TVIRemoteParticipant`s + for remoteParticipant in room.remoteParticipants { + remoteParticipant.delegate = self + } + + if (room.remoteParticipants.count > 0) { + stopVideoPlayer() + startVideoPlayer() + } + + let connectMessage = "Connected to room \(room.name) as \(room.localParticipant?.identity ?? "")." + logMessage(messageText: connectMessage) + + self.showRoomUI(inRoom: true) + } + + func room(_ room: TVIRoom, didDisconnectWithError error: Error?) { + if let disconnectError = error { + logMessage(messageText: "Disconnected from \(room.name).\ncode = \((disconnectError as NSError).code) error = \(disconnectError.localizedDescription)") + } else { + logMessage(messageText: "Disconnected from \(room.name)") + } + + stopVideoPlayer() + self.localVideoTrack = nil + self.localAudioTrack = nil + self.playerVideoTrack = nil + self.videoPlayerSource = nil + self.room = nil + self.showRoomUI(inRoom: false) + self.accessToken = "TWILIO_ACCESS_TOKEN" + } + + func room(_ room: TVIRoom, didFailToConnectWithError error: Error) { + logMessage(messageText: "Failed to connect to Room:\n\(error.localizedDescription)") + + self.room = nil + self.localVideoTrack = nil + self.localAudioTrack = nil + self.showRoomUI(inRoom: false) + self.accessToken = "TWILIO_ACCESS_TOKEN" + } + + func room(_ room: TVIRoom, participantDidConnect participant: TVIRemoteParticipant) { + participant.delegate = self + + logMessage(messageText: "Participant \(participant.identity) connected with \(participant.remoteAudioTracks.count) audio and \(participant.remoteVideoTracks.count) video tracks") + + if (room.remoteParticipants.count == 1) { + stopVideoPlayer() + startVideoPlayer() + } + } + + func room(_ room: TVIRoom, participantDidDisconnect participant: TVIRemoteParticipant) { + logMessage(messageText: "Room \(room.name), Participant \(participant.identity) disconnected") + } +} + +// MARK: TVIRemoteParticipantDelegate +extension ViewController : TVIRemoteParticipantDelegate { + + func remoteParticipant(_ participant: TVIRemoteParticipant, + publishedVideoTrack publication: TVIRemoteVideoTrackPublication) { + + // Remote Participant has offered to share the video Track. + + logMessage(messageText: "Participant \(participant.identity) published \(publication.trackName) video track") + } + + func remoteParticipant(_ participant: TVIRemoteParticipant, + unpublishedVideoTrack publication: TVIRemoteVideoTrackPublication) { + + // Remote Participant has stopped sharing the video Track. + + logMessage(messageText: "Participant \(participant.identity) unpublished \(publication.trackName) video track") + } + + func remoteParticipant(_ participant: TVIRemoteParticipant, + publishedAudioTrack publication: TVIRemoteAudioTrackPublication) { + + // Remote Participant has offered to share the audio Track. + + logMessage(messageText: "Participant \(participant.identity) published \(publication.trackName) audio track") + } + + func remoteParticipant(_ participant: TVIRemoteParticipant, + unpublishedAudioTrack publication: TVIRemoteAudioTrackPublication) { + + // Remote Participant has stopped sharing the audio Track. 
+ + logMessage(messageText: "Participant \(participant.identity) unpublished \(publication.trackName) audio track") + } + + func subscribed(to videoTrack: TVIRemoteVideoTrack, + publication: TVIRemoteVideoTrackPublication, + for participant: TVIRemoteParticipant) { + + // We are subscribed to the remote Participant's video Track. We will start receiving the + // remote Participant's video frames now. + + logMessage(messageText: "Subscribed to \(publication.trackName) video track for Participant \(participant.identity)") + + // Start remote rendering. + if (videoTrack.name == ViewController.kPlayerTrackName) { + setupRemoteVideoPlayer(videoTrack: videoTrack) + } else { + videoTrack.addRenderer(self.remoteView) + } + } + + func unsubscribed(from videoTrack: TVIRemoteVideoTrack, + publication: TVIRemoteVideoTrackPublication, + for participant: TVIRemoteParticipant) { + + // We are unsubscribed from the remote Participant's video Track. We will no longer receive the + // remote Participant's video. + + logMessage(messageText: "Unsubscribed from \(publication.trackName) video track for Participant \(participant.identity)") + + let renderers = videoTrack.renderers + let hasRemotePlayerView = renderers.contains { (renderer) -> Bool in + return renderer.isEqual(self.remotePlayerView) + } + let hasRemoteView = renderers.contains { (renderer) -> Bool in + return renderer.isEqual(self.remoteView) + } + + // Stop remote rendering. + if hasRemotePlayerView, + let playerView = self.remotePlayerView { + videoTrack.removeRenderer(playerView) + playerView.removeFromSuperview() + self.remotePlayerView = nil + } else if hasRemoteView { + videoTrack.removeRenderer(self.remoteView) + } + } + + func subscribed(to audioTrack: TVIRemoteAudioTrack, + publication: TVIRemoteAudioTrackPublication, + for participant: TVIRemoteParticipant) { + + // We are subscribed to the remote Participant's audio Track. We will start receiving the + // remote Participant's audio now. + + logMessage(messageText: "Subscribed to \(publication.trackName) audio track for Participant \(participant.identity)") + } + + func unsubscribed(from audioTrack: TVIRemoteAudioTrack, + publication: TVIRemoteAudioTrackPublication, + for participant: TVIRemoteParticipant) { + + // We are unsubscribed from the remote Participant's audio Track. We will no longer receive the + // remote Participant's audio. + + logMessage(messageText: "Unsubscribed from \(publication.trackName) audio track for Participant \(participant.identity)") + } + + func remoteParticipant(_ participant: TVIRemoteParticipant, + enabledVideoTrack publication: TVIRemoteVideoTrackPublication) { + logMessage(messageText: "Participant \(participant.identity) enabled \(publication.trackName) video track") + } + + func remoteParticipant(_ participant: TVIRemoteParticipant, + disabledVideoTrack publication: TVIRemoteVideoTrackPublication) { + logMessage(messageText: "Participant \(participant.identity) disabled \(publication.trackName) video track") + } + + func remoteParticipant(_ participant: TVIRemoteParticipant, + enabledAudioTrack publication: TVIRemoteAudioTrackPublication) { + logMessage(messageText: "Participant \(participant.identity) enabled \(publication.trackName) audio track") + } + + func remoteParticipant(_ participant: TVIRemoteParticipant, + disabledAudioTrack publication: TVIRemoteAudioTrackPublication) { + // We will continue to record silence and/or recognize audio while a Track is disabled. 
+ logMessage(messageText: "Participant \(participant.identity) disabled \(publication.trackName) audio track") + } + + func failedToSubscribe(toAudioTrack publication: TVIRemoteAudioTrackPublication, + error: Error, + for participant: TVIRemoteParticipant) { + logMessage(messageText: "FailedToSubscribe \(publication.trackName) audio track, error = \(String(describing: error))") + } + + func failedToSubscribe(toVideoTrack publication: TVIRemoteVideoTrackPublication, + error: Error, + for participant: TVIRemoteParticipant) { + logMessage(messageText: "FailedToSubscribe \(publication.trackName) video track, error = \(String(describing: error))") + } +} + +extension ViewController : TVICameraCapturerDelegate { + func cameraCapturer(_ capturer: TVICameraCapturer, didStartWith source: TVICameraCaptureSource) { + // Layout the camera preview with dimensions appropriate for our orientation. + self.view.setNeedsLayout() + } + + func cameraCapturer(_ capturer: TVICameraCapturer, didFailWithError error: Error) { + logMessage(messageText: "Capture failed with error.\ncode = \((error as NSError).code) error = \(error.localizedDescription)") + capturer.previewView.removeFromSuperview() + } +} + +extension ViewController : TVIVideoViewDelegate { + func videoViewDidReceiveData(_ view: TVIVideoView) { + if view == self.localView || view == self.remoteView { + self.view.setNeedsUpdateConstraints() + } + } + func videoView(_ view: TVIVideoView, videoDimensionsDidChange dimensions: CMVideoDimensions) { + if view == self.localView || view == self.remoteView { + self.view.setNeedsUpdateConstraints() + } + } +} + +// MARK: TVICameraSourceDelegate +extension ViewController : TVICameraSourceDelegate { + func cameraSource(_ source: TVICameraSource, didFailWithError error: Error) { + logMessage(messageText: "Camera source failed with error: \(error.localizedDescription)") + } +} diff --git a/AVPlayerExample/main.m b/AVPlayerExample/main.m deleted file mode 100644 index 66f07492..00000000 --- a/AVPlayerExample/main.m +++ /dev/null @@ -1,16 +0,0 @@ -// -// main.m -// AVPlayerExample -// -// Created by Chris Eagleston on 5/16/17. -// Copyright © 2017 Twilio Inc. All rights reserved. -// - -#import -#import "AppDelegate.h" - -int main(int argc, char * argv[]) { - @autoreleasepool { - return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class])); - } -} diff --git a/Podfile b/Podfile index 7eb0aeb8..2bbc63cd 100644 --- a/Podfile +++ b/Podfile @@ -63,6 +63,8 @@ abstract_target 'TwilioVideo' do target 'AVPlayerExample' do platform :ios, '9.0' project 'AVPlayerExample.xcproject' + + pod 'TPCircularBuffer', '~> 1.6' end end